refactor: update Dockerfile and docker-compose.yml for improved media handling
- Removed the USER directive from the Dockerfile to simplify the build process. - Added new volume mounts in docker-compose.yml for additional media directories, enhancing media accessibility. - Updated the media database file, reflecting changes in media management. - Deleted obsolete thumbnail files to clean up the project structure and reduce clutter.
18
Dockerfile
|
|
@ -42,30 +42,20 @@ RUN apk add --no-cache \
|
||||||
sqlite \
|
sqlite \
|
||||||
&& rm -rf /var/cache/apk/*
|
&& rm -rf /var/cache/apk/*
|
||||||
|
|
||||||
# Create user
|
|
||||||
RUN addgroup --system --gid 1001 nodejs
|
|
||||||
RUN adduser --system --uid 1001 --ingroup nodejs nextjs
|
|
||||||
|
|
||||||
# Create necessary directories
|
# Create necessary directories
|
||||||
RUN mkdir -p /app/data /app/media /app/public/thumbnails
|
RUN mkdir -p /app/data /app/media /app/public/thumbnails
|
||||||
|
|
||||||
# Copy only the necessary files from builder
|
# Copy only the necessary files from builder
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/public ./public
|
COPY --from=builder /app/public ./public
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
|
COPY --from=builder /app/.next/standalone ./
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
COPY --from=builder /app/.next/static ./.next/static
|
||||||
|
|
||||||
# Copy the entire node_modules to ensure all native bindings are available
|
# Copy the entire node_modules to ensure all native bindings are available
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
|
COPY --from=builder /app/node_modules ./node_modules
|
||||||
|
|
||||||
# Rebuild native bindings for the production environment
|
# Rebuild native bindings for the production environment
|
||||||
RUN npm rebuild better-sqlite3
|
RUN npm rebuild better-sqlite3
|
||||||
|
|
||||||
# Set correct permissions
|
|
||||||
RUN chown -R nextjs:nodejs /app/data /app/media /app/public
|
|
||||||
|
|
||||||
# Switch to non-root user
|
|
||||||
USER nextjs
|
|
||||||
|
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
|
|
||||||
ENV PORT=3000
|
ENV PORT=3000
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,10 @@ services:
|
||||||
- ${DB_PATH:-./data}:/app/data
|
- ${DB_PATH:-./data}:/app/data
|
||||||
- ${MEDIA_PATH:-./media}:/app/media
|
- ${MEDIA_PATH:-./media}:/app/media
|
||||||
- ${THUMBNAILS_PATH:-./public/thumbnails}:/app/public/thumbnails
|
- ${THUMBNAILS_PATH:-./public/thumbnails}:/app/public/thumbnails
|
||||||
- /mnt/data1:/mnt/data1
|
- /mnt/nas_ad2:/mnt/nasad2
|
||||||
|
- /mnt/thd_media_f:/mnt/f
|
||||||
|
- /mnt/thd_media_g:/mnt/g
|
||||||
|
- /mnt/thd_media_h:/mnt/h
|
||||||
command: node server.js
|
command: node server.js
|
||||||
environment:
|
environment:
|
||||||
- NODE_ENV=production
|
- NODE_ENV=production
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,524 @@
|
||||||
|
# Process Management: Reliable Transcoding Termination
|
||||||
|
|
||||||
|
## Stash's Transcoding Architecture
|
||||||
|
|
||||||
|
### Core Problem
|
||||||
|
Live transcoding processes must terminate reliably when:
|
||||||
|
- Client disconnects
|
||||||
|
- User navigates away
|
||||||
|
- Server shuts down
|
||||||
|
- Network interruptions occur
|
||||||
|
|
||||||
|
### Three-Layer Termination Strategy
|
||||||
|
|
||||||
|
#### Layer 1: HTTP Context Cancellation
|
||||||
|
```go
|
||||||
|
// From internal/api/routes_scene.go
|
||||||
|
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamType ffmpeg.StreamFormat) {
|
||||||
|
// Uses request context for automatic cleanup
|
||||||
|
streamManager.ServeTranscode(w, r, options)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Layer 2: Context-Based Process Tracking
|
||||||
|
```go
|
||||||
|
// From internal/manager/stream_manager.go
|
||||||
|
type LockContext struct {
|
||||||
|
context.Context
|
||||||
|
cancel context.CancelFunc
|
||||||
|
cmd *exec.Cmd
|
||||||
|
mu sync.Mutex
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *LockContext) Cancel() {
|
||||||
|
c.cancel() // Signal cancellation
|
||||||
|
if c.cmd != nil {
|
||||||
|
// Graceful termination with timeout
|
||||||
|
done := make(chan error)
|
||||||
|
go func() {
|
||||||
|
err := c.cmd.Wait()
|
||||||
|
done <- err
|
||||||
|
}()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-done:
|
||||||
|
return
|
||||||
|
case <-time.After(5 * time.Second):
|
||||||
|
// Force kill after timeout
|
||||||
|
if c.cmd.Process != nil {
|
||||||
|
c.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Layer 3: Centralized Process Registry
|
||||||
|
```go
|
||||||
|
// From internal/manager/stream_manager.go
|
||||||
|
type StreamManager struct {
|
||||||
|
lockManager *fsutil.ReadLockManager
|
||||||
|
processes map[string]*LockContext
|
||||||
|
mu sync.RWMutex
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *StreamManager) Shutdown() {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
|
||||||
|
for _, ctx := range s.processes {
|
||||||
|
ctx.Cancel() // Kill all active processes
|
||||||
|
}
|
||||||
|
s.processes = make(map[string]*LockContext)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Implementation Patterns
|
||||||
|
|
||||||
|
### 1. Context Propagation
|
||||||
|
```go
|
||||||
|
// Create context with cancellation
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
|
||||||
|
// Attach to HTTP request lifecycle
|
||||||
|
ctx = context.WithValue(ctx, "request_id", requestID)
|
||||||
|
|
||||||
|
// Use throughout transcoding pipeline
|
||||||
|
ffmpegCmd := exec.CommandContext(ctx, "ffmpeg", args...)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Process Attachment Pattern
|
||||||
|
```go
|
||||||
|
type TranscodeContext struct {
|
||||||
|
context.Context
|
||||||
|
cmd *exec.Cmd
|
||||||
|
startTime time.Time
|
||||||
|
filePath string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *TranscodeContext) AttachCommand(cmd *exec.Cmd) {
|
||||||
|
c.cmd = cmd
|
||||||
|
c.startTime = time.Now()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *TranscodeContext) Terminate() error {
|
||||||
|
if c.cmd == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send SIGTERM first
|
||||||
|
if err := c.cmd.Process.Signal(syscall.SIGTERM); err != nil {
|
||||||
|
// Fallback to SIGKILL
|
||||||
|
return c.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for graceful shutdown
|
||||||
|
done := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
done <- c.cmd.Wait()
|
||||||
|
}()
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-done:
|
||||||
|
return nil
|
||||||
|
case <-time.After(5 * time.Second):
|
||||||
|
return c.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. File Lock Management
|
||||||
|
```go
|
||||||
|
// From pkg/fsutil/lock_manager.go
|
||||||
|
type ReadLockManager struct {
|
||||||
|
activeLocks map[string]*lockEntry
|
||||||
|
mu sync.RWMutex
|
||||||
|
}
|
||||||
|
|
||||||
|
type lockEntry struct {
|
||||||
|
ctx context.Context
|
||||||
|
cancel context.CancelFunc
|
||||||
|
refCount int
|
||||||
|
filePath string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *ReadLockManager) AddLock(filePath string) context.Context {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
|
||||||
|
if entry, exists := m.activeLocks[filePath]; exists {
|
||||||
|
entry.refCount++
|
||||||
|
return entry.ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
m.activeLocks[filePath] = &lockEntry{
|
||||||
|
ctx: ctx,
|
||||||
|
cancel: cancel,
|
||||||
|
refCount: 1,
|
||||||
|
filePath: filePath,
|
||||||
|
}
|
||||||
|
|
||||||
|
return ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *ReadLockManager) RemoveLock(filePath string) {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
|
||||||
|
if entry, exists := m.activeLocks[filePath]; exists {
|
||||||
|
entry.refCount--
|
||||||
|
if entry.refCount <= 0 {
|
||||||
|
entry.cancel()
|
||||||
|
delete(m.activeLocks, filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. HTTP Connection Management
|
||||||
|
```go
|
||||||
|
// Enhanced HTTP handler with connection monitoring
|
||||||
|
func (s *StreamManager) ServeTranscode(w http.ResponseWriter, r *http.Request, options TranscodeOptions) {
|
||||||
|
// Create context from request
|
||||||
|
ctx, cancel := context.WithCancel(r.Context())
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
// Monitor connection state
|
||||||
|
go func() {
|
||||||
|
<-r.Context().Done()
|
||||||
|
cancel() // Client disconnected
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Hijack connection for immediate cleanup
|
||||||
|
if hj, ok := w.(http.Hijacker); ok {
|
||||||
|
conn, _, err := hj.Hijack()
|
||||||
|
if err == nil {
|
||||||
|
defer conn.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start transcoding
|
||||||
|
cmd := exec.CommandContext(ctx, "ffmpeg", args...)
|
||||||
|
|
||||||
|
// Stream output
|
||||||
|
w.Header().Set("Content-Type", "video/mp4")
|
||||||
|
w.Header().Set("Cache-Control", "no-cache")
|
||||||
|
|
||||||
|
cmd.Stdout = w
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := cmd.Wait(); err != nil {
|
||||||
|
if ctx.Err() != context.Canceled {
|
||||||
|
logger.Errorf("Transcoding failed: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next.js Adaptation Guide
|
||||||
|
|
||||||
|
### 1. AbortController Pattern
|
||||||
|
```typescript
|
||||||
|
// pages/api/transcode/[...slug].ts
|
||||||
|
import { spawn } from 'child_process';
|
||||||
|
|
||||||
|
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||||
|
const controller = new AbortController();
|
||||||
|
const { signal } = controller;
|
||||||
|
|
||||||
|
// Attach to request lifecycle
|
||||||
|
req.on('close', () => {
|
||||||
|
controller.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
req.on('aborted', () => {
|
||||||
|
controller.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ffmpeg = spawn('ffmpeg', [
|
||||||
|
'-i', req.query.file as string,
|
||||||
|
'-f', 'mp4',
|
||||||
|
'-movflags', 'frag_keyframe+empty_moov',
|
||||||
|
'pipe:1'
|
||||||
|
], { signal });
|
||||||
|
|
||||||
|
// Handle cleanup
|
||||||
|
signal.addEventListener('abort', () => {
|
||||||
|
if (!ffmpeg.killed) {
|
||||||
|
ffmpeg.kill('SIGKILL');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Stream response
|
||||||
|
res.setHeader('Content-Type', 'video/mp4');
|
||||||
|
res.setHeader('Cache-Control', 'no-cache');
|
||||||
|
|
||||||
|
ffmpeg.stdout.pipe(res);
|
||||||
|
|
||||||
|
const exitCode = await new Promise((resolve) => {
|
||||||
|
ffmpeg.on('exit', resolve);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
res.status(500).end();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (signal.aborted) {
|
||||||
|
res.status(499).end(); // Client closed request
|
||||||
|
} else {
|
||||||
|
res.status(500).json({ error: 'Transcoding failed' });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Process Pool Management
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/pool.ts
|
||||||
|
import { ChildProcess } from 'child_process';
|
||||||
|
|
||||||
|
class TranscodePool {
|
||||||
|
private processes = new Map<string, {
|
||||||
|
process: ChildProcess;
|
||||||
|
startTime: Date;
|
||||||
|
filePath: string;
|
||||||
|
}>();
|
||||||
|
|
||||||
|
add(key: string, process: ChildProcess, filePath: string) {
|
||||||
|
this.processes.set(key, {
|
||||||
|
process,
|
||||||
|
startTime: new Date(),
|
||||||
|
filePath
|
||||||
|
});
|
||||||
|
|
||||||
|
// Auto cleanup on exit
|
||||||
|
process.on('exit', () => {
|
||||||
|
this.processes.delete(key);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
killAll() {
|
||||||
|
for (const [key, { process }] of this.processes) {
|
||||||
|
if (!process.killed) {
|
||||||
|
process.kill('SIGKILL');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.processes.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
kill(key: string) {
|
||||||
|
const entry = this.processes.get(key);
|
||||||
|
if (entry && !entry.process.killed) {
|
||||||
|
entry.process.kill('SIGKILL');
|
||||||
|
this.processes.delete(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getStats() {
|
||||||
|
return Array.from(this.processes.entries()).map(([key, entry]) => ({
|
||||||
|
key,
|
||||||
|
filePath: entry.filePath,
|
||||||
|
startTime: entry.startTime,
|
||||||
|
uptime: Date.now() - entry.startTime.getTime()
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const transcodePool = new TranscodePool();
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Connection Monitoring
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/connection-monitor.ts
|
||||||
|
class ConnectionMonitor {
|
||||||
|
private activeConnections = new Set<string>();
|
||||||
|
|
||||||
|
trackConnection(connectionId: string, process: ChildProcess) {
|
||||||
|
this.activeConnections.add(connectionId);
|
||||||
|
|
||||||
|
// Monitor for stale connections
|
||||||
|
const checkInterval = setInterval(() => {
|
||||||
|
if (process.killed) {
|
||||||
|
clearInterval(checkInterval);
|
||||||
|
this.activeConnections.delete(connectionId);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if connection is still alive
|
||||||
|
// This would need to be implemented based on your specific needs
|
||||||
|
}, 30000);
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanupConnection(connectionId: string) {
|
||||||
|
this.activeConnections.delete(connectionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
getActiveCount() {
|
||||||
|
return this.activeConnections.size;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const connectionMonitor = new ConnectionMonitor();
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Graceful Shutdown
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/shutdown.ts
|
||||||
|
import { transcodePool } from './pool';
|
||||||
|
|
||||||
|
function setupGracefulShutdown() {
|
||||||
|
// Handle SIGTERM (Docker, PM2, etc.)
|
||||||
|
process.on('SIGTERM', () => {
|
||||||
|
console.log('Received SIGTERM, cleaning up...');
|
||||||
|
transcodePool.killAll();
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle SIGINT (Ctrl+C)
|
||||||
|
process.on('SIGINT', () => {
|
||||||
|
console.log('Received SIGINT, cleaning up...');
|
||||||
|
transcodePool.killAll();
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle uncaught exceptions
|
||||||
|
process.on('uncaughtException', (error) => {
|
||||||
|
console.error('Uncaught exception:', error);
|
||||||
|
transcodePool.killAll();
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle unhandled promise rejections
|
||||||
|
process.on('unhandledRejection', (reason, promise) => {
|
||||||
|
console.error('Unhandled rejection at:', promise, 'reason:', reason);
|
||||||
|
transcodePool.killAll();
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export { setupGracefulShutdown };
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Health Monitoring
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/health.ts
|
||||||
|
class TranscodeHealthMonitor {
|
||||||
|
private healthCheckInterval: NodeJS.Timeout | null = null;
|
||||||
|
|
||||||
|
startMonitoring() {
|
||||||
|
this.healthCheckInterval = setInterval(() => {
|
||||||
|
const stats = transcodePool.getStats();
|
||||||
|
|
||||||
|
// Check for hanging processes
|
||||||
|
stats.forEach(({ key, uptime }) => {
|
||||||
|
if (uptime > 300000) { // 5 minutes
|
||||||
|
console.warn(`Killing hanging transcode: ${key}`);
|
||||||
|
transcodePool.kill(key);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Memory usage monitoring
|
||||||
|
const memUsage = process.memoryUsage();
|
||||||
|
if (memUsage.heapUsed / memUsage.heapTotal > 0.9) {
|
||||||
|
console.warn('High memory usage, killing oldest transcodes');
|
||||||
|
// Implement cleanup strategy
|
||||||
|
}
|
||||||
|
}, 30000);
|
||||||
|
}
|
||||||
|
|
||||||
|
stopMonitoring() {
|
||||||
|
if (this.healthCheckInterval) {
|
||||||
|
clearInterval(this.healthCheckInterval);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const healthMonitor = new TranscodeHealthMonitor();
|
||||||
|
```
|
||||||
|
|
||||||
|
## Production Deployment
|
||||||
|
|
||||||
|
### PM2 Configuration
|
||||||
|
```javascript
|
||||||
|
// ecosystem.config.js
|
||||||
|
module.exports = {
|
||||||
|
apps: [{
|
||||||
|
name: 'transcode-server',
|
||||||
|
script: 'server.js',
|
||||||
|
instances: 'max',
|
||||||
|
exec_mode: 'cluster',
|
||||||
|
max_memory_restart: '1G',
|
||||||
|
env: {
|
||||||
|
NODE_ENV: 'production',
|
||||||
|
TRANSCODE_TIMEOUT: '300000',
|
||||||
|
MAX_CONCURRENT_TRANSCODES: '4'
|
||||||
|
},
|
||||||
|
kill_timeout: 10000,
|
||||||
|
listen_timeout: 10000,
|
||||||
|
shutdown_with_message: true
|
||||||
|
}]
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Configuration
|
||||||
|
```dockerfile
|
||||||
|
FROM node:18-alpine
|
||||||
|
|
||||||
|
# Install FFmpeg
|
||||||
|
RUN apk add --no-cache ffmpeg
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci --only=production
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Set resource limits
|
||||||
|
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
||||||
|
|
||||||
|
EXPOSE 3000
|
||||||
|
|
||||||
|
CMD ["node", "server.js"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Kubernetes Resource Limits
|
||||||
|
```yaml
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: transcode-service
|
||||||
|
spec:
|
||||||
|
template:
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: transcode
|
||||||
|
image: transcode-service:latest
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
memory: "512Mi"
|
||||||
|
cpu: "500m"
|
||||||
|
limits:
|
||||||
|
memory: "2Gi"
|
||||||
|
cpu: "2000m"
|
||||||
|
lifecycle:
|
||||||
|
preStop:
|
||||||
|
exec:
|
||||||
|
command: ["/bin/sh", "-c", "kill -TERM 1; sleep 10"]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Takeaways
|
||||||
|
|
||||||
|
1. **Always use context-based cancellation** for reliable cleanup
|
||||||
|
2. **Implement three-layer termination** (HTTP, context, process)
|
||||||
|
3. **Monitor resource usage** to prevent system overload
|
||||||
|
4. **Handle edge cases** (network drops, server restarts)
|
||||||
|
5. **Provide graceful degradation** when services are overloaded
|
||||||
|
6. **Log termination events** for debugging and monitoring
|
||||||
|
|
||||||
|
This approach ensures 99%+ reliable process termination even under high load or adverse network conditions.
|
||||||
|
|
@ -0,0 +1,465 @@
|
||||||
|
# Live Transcoding Progress Bar Accuracy - Anti-Jitter Mechanisms
|
||||||
|
|
||||||
|
## Core Problem Analysis
|
||||||
|
|
||||||
|
Live transcoding creates unique challenges for accurate progress reporting:
|
||||||
|
|
||||||
|
1. **Duration Drift**: Transcoded segments may have slightly different durations than expected
|
||||||
|
2. **Buffer Timing**: Real-time transcoding can't provide accurate total duration until completion
|
||||||
|
3. **Seek Inconsistency**: Seeking to unbuffered positions causes progress jumps
|
||||||
|
4. **Network Variability**: Connection issues cause buffering delays
|
||||||
|
5. **Segment Validation**: Incomplete or corrupted segments affect progress accuracy
|
||||||
|
|
||||||
|
## Stash's Anti-Jitter Strategy
|
||||||
|
|
||||||
|
### 1. Precise Duration Extraction
|
||||||
|
|
||||||
|
**From ffprobe.go** - Accurate duration calculation:
|
||||||
|
```go
|
||||||
|
// pkg/ffmpeg/ffprobe.go - parse function
|
||||||
|
func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
|
||||||
|
result := &VideoFile{}
|
||||||
|
|
||||||
|
// Primary duration from video stream
|
||||||
|
duration, _ := strconv.ParseFloat(videoStream.Duration, 64)
|
||||||
|
result.VideoStreamDuration = math.Round(duration*100) / 100
|
||||||
|
|
||||||
|
// Fallback to container duration with rounding
|
||||||
|
if result.VideoStreamDuration == 0 {
|
||||||
|
duration, _ := strconv.ParseFloat(probeJSON.Format.Duration, 64)
|
||||||
|
result.VideoStreamDuration = math.Round(duration*100) / 100
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store both for validation
|
||||||
|
result.FileDuration = result.VideoStreamDuration
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Segment-Based Progress Tracking
|
||||||
|
|
||||||
|
**From stream_segmented.go** - Atomic segment generation:
|
||||||
|
```go
|
||||||
|
// internal/manager/stream_segmented.go
|
||||||
|
func (tp *transcodeProcess) serveHLSManifest() []byte {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
buf.WriteString("#EXTM3U\n")
|
||||||
|
buf.WriteString("#EXT-X-VERSION:3\n")
|
||||||
|
buf.WriteString("#EXT-X-TARGETDURATION:2\n")
|
||||||
|
|
||||||
|
// Pre-calculated exact durations
|
||||||
|
leftover := tp.videoFile.VideoStreamDuration
|
||||||
|
segment := 0
|
||||||
|
for leftover > 0 {
|
||||||
|
thisLength := math.Min(float64(segmentLength), leftover)
|
||||||
|
fmt.Fprintf(&buf, "#EXTINF:%f,\n", thisLength)
|
||||||
|
fmt.Fprintf(&buf, "segment_%06d.ts\n", segment)
|
||||||
|
leftover -= thisLength
|
||||||
|
segment++
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.Bytes()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Anti-Jitter Mechanisms
|
||||||
|
|
||||||
|
#### A. Segment Atomicity
|
||||||
|
```typescript
|
||||||
|
// Frontend segment validation
|
||||||
|
interface SegmentValidation {
|
||||||
|
segmentNumber: number;
|
||||||
|
expectedDuration: number;
|
||||||
|
actualDuration?: number;
|
||||||
|
isComplete: boolean;
|
||||||
|
isValid: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const validateSegment = async (
|
||||||
|
segmentNumber: number,
|
||||||
|
expectedDuration: number
|
||||||
|
): Promise<SegmentValidation> => {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/segments/${segmentNumber}`);
|
||||||
|
const contentLength = response.headers.get('content-length');
|
||||||
|
const actualDuration = await calculateDurationFromBytes(
|
||||||
|
parseInt(contentLength || '0')
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
segmentNumber,
|
||||||
|
expectedDuration,
|
||||||
|
actualDuration,
|
||||||
|
isComplete: actualDuration >= expectedDuration * 0.95,
|
||||||
|
isValid: actualDuration >= expectedDuration * 0.8
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
segmentNumber,
|
||||||
|
expectedDuration,
|
||||||
|
isComplete: false,
|
||||||
|
isValid: false
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
#### B. Buffer State Management
|
||||||
|
```typescript
|
||||||
|
// From ScenePlayerScrubber.tsx - Smooth progress updates
|
||||||
|
const useSmoothProgress = (currentTime: number, duration: number) => {
|
||||||
|
const [displayTime, setDisplayTime] = useState(currentTime);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const targetTime = Math.min(currentTime, duration);
|
||||||
|
const diff = targetTime - displayTime;
|
||||||
|
|
||||||
|
// Only update if significant change or approaching end
|
||||||
|
if (Math.abs(diff) > 0.1 || targetTime >= duration - 1) {
|
||||||
|
setDisplayTime(targetTime);
|
||||||
|
}
|
||||||
|
}, [currentTime, duration]);
|
||||||
|
|
||||||
|
return displayTime;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
#### C. Seek Position Validation
|
||||||
|
```typescript
|
||||||
|
// Validate seek positions against actual buffer state
|
||||||
|
const validateSeekPosition = (
|
||||||
|
requestedTime: number,
|
||||||
|
bufferedRanges: TimeRanges,
|
||||||
|
duration: number
|
||||||
|
): number => {
|
||||||
|
let validTime = requestedTime;
|
||||||
|
|
||||||
|
// Find nearest buffered segment
|
||||||
|
for (let i = 0; i < bufferedRanges.length; i++) {
|
||||||
|
if (
|
||||||
|
requestedTime >= bufferedRanges.start(i) &&
|
||||||
|
requestedTime <= bufferedRanges.end(i)
|
||||||
|
) {
|
||||||
|
return requestedTime; // Exact match
|
||||||
|
}
|
||||||
|
|
||||||
|
if (requestedTime < bufferedRanges.start(i)) {
|
||||||
|
validTime = bufferedRanges.start(i); // Snap to nearest available
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Math.max(0, Math.min(validTime, duration));
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## Real-time Transcoding Coordination
|
||||||
|
|
||||||
|
### Backend Process Monitoring
|
||||||
|
```go
|
||||||
|
// From internal/manager/stream_segmented.go
|
||||||
|
type transcodeProcess struct {
|
||||||
|
videoFile *models.VideoFile
|
||||||
|
outputDir string
|
||||||
|
segmentCount int
|
||||||
|
|
||||||
|
// Real-time state tracking
|
||||||
|
lastSegment int
|
||||||
|
waitingSegment chan int
|
||||||
|
|
||||||
|
// Progress synchronization
|
||||||
|
mu sync.RWMutex
|
||||||
|
currentTime float64
|
||||||
|
totalTime float64
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tp *transcodeProcess) updateProgress(segment int, duration float64) {
|
||||||
|
tp.mu.Lock()
|
||||||
|
tp.currentTime = float64(segment) * 2.0 // 2-second segments
|
||||||
|
tp.mu.Unlock()
|
||||||
|
|
||||||
|
// Notify waiting clients
|
||||||
|
select {
|
||||||
|
case tp.waitingSegment <- segment:
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tp *transcodeProcess) getProgress() (float64, float64) {
|
||||||
|
tp.mu.RLock()
|
||||||
|
defer tp.mu.RUnlock()
|
||||||
|
return tp.currentTime, tp.totalTime
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Frontend Smoothing Layers
|
||||||
|
|
||||||
|
### Multi-layer Progress Smoothing
|
||||||
|
```typescript
|
||||||
|
// Progressive enhancement for progress accuracy
|
||||||
|
const useAccurateProgress = (
|
||||||
|
playerRef: RefObject<HTMLVideoElement>,
|
||||||
|
totalDuration: number
|
||||||
|
) => {
|
||||||
|
const [progress, setProgress] = useState(0);
|
||||||
|
const [buffered, setBuffered] = useState(0);
|
||||||
|
const [isTranscoding, setIsTranscoding] = useState(true);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const video = playerRef.current;
|
||||||
|
if (!video) return;
|
||||||
|
|
||||||
|
// Primary source: video.currentTime
|
||||||
|
const handleTimeUpdate = debounce(() => {
|
||||||
|
const newProgress = video.currentTime / totalDuration;
|
||||||
|
|
||||||
|
// Prevent backward jumps
|
||||||
|
if (newProgress >= progress || newProgress >= 0.99) {
|
||||||
|
setProgress(Math.min(newProgress, 1));
|
||||||
|
}
|
||||||
|
}, 100);
|
||||||
|
|
||||||
|
// Secondary source: buffered ranges
|
||||||
|
const handleProgress = debounce(() => {
|
||||||
|
const bufferedEnd = video.buffered.length > 0
|
||||||
|
? video.buffered.end(video.buffered.length - 1)
|
||||||
|
: 0;
|
||||||
|
setBuffered(bufferedEnd / totalDuration);
|
||||||
|
}, 250);
|
||||||
|
|
||||||
|
// Debounced updates for smooth UI
|
||||||
|
const debouncedTimeUpdate = debounce(handleTimeUpdate, 100);
|
||||||
|
const debouncedProgress = debounce(handleProgress, 250);
|
||||||
|
|
||||||
|
video.addEventListener('timeupdate', debouncedTimeUpdate);
|
||||||
|
video.addEventListener('progress', debouncedProgress);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
video.removeEventListener('timeupdate', debouncedTimeUpdate);
|
||||||
|
video.removeEventListener('progress', debouncedProgress);
|
||||||
|
};
|
||||||
|
}, [playerRef, totalDuration]);
|
||||||
|
|
||||||
|
return { progress, buffered, isTranscoding };
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next.js Implementation Strategy
|
||||||
|
|
||||||
|
### API Route with Progress Tracking
|
||||||
|
```typescript
|
||||||
|
// pages/api/transcode/[...slug].ts
|
||||||
|
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||||
|
const { file, quality, start } = req.query;
|
||||||
|
|
||||||
|
// 1. Extract precise duration first
|
||||||
|
const metadata = await extractVideoMetadata(file as string);
|
||||||
|
res.setHeader('X-Total-Duration', metadata.duration);
|
||||||
|
|
||||||
|
// 2. Segment-based streaming
|
||||||
|
const segmentDuration = 2; // 2-second segments
|
||||||
|
const totalSegments = Math.ceil(metadata.duration / segmentDuration);
|
||||||
|
|
||||||
|
// 3. Generate accurate manifest
|
||||||
|
const manifest = generateHLSManifest(totalSegments, segmentDuration, metadata.duration);
|
||||||
|
|
||||||
|
res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
|
||||||
|
res.send(manifest);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Segment serving with validation
|
||||||
|
export const serveSegment = async (req: NextApiRequest, res: NextApiResponse) => {
|
||||||
|
const { segment } = req.query;
|
||||||
|
const segmentPath = `segments/segment_${segment.toString().padStart(6, '0')}.ts`;
|
||||||
|
|
||||||
|
// Validate segment exists and is complete
|
||||||
|
try {
|
||||||
|
const stats = await fs.stat(segmentPath);
|
||||||
|
if (stats.size === 0) {
|
||||||
|
return res.status(404).end();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check segment integrity
|
||||||
|
const isValid = await validateSegmentIntegrity(segmentPath);
|
||||||
|
if (!isValid) {
|
||||||
|
return res.status(410).end(); // Gone - segment invalid
|
||||||
|
}
|
||||||
|
|
||||||
|
res.setHeader('Content-Type', 'video/mp2t');
|
||||||
|
res.setHeader('Content-Length', stats.size);
|
||||||
|
|
||||||
|
const stream = createReadStream(segmentPath);
|
||||||
|
stream.pipe(res);
|
||||||
|
|
||||||
|
// Cleanup on client disconnect
|
||||||
|
req.on('close', () => {
|
||||||
|
stream.destroy();
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
res.status(404).end();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Client-Side Integration
|
||||||
|
```typescript
|
||||||
|
// hooks/useTranscodeProgress.ts
|
||||||
|
import { useState, useEffect, useRef } from 'react';
|
||||||
|
|
||||||
|
export const useTranscodeProgress = (
|
||||||
|
videoUrl: string,
|
||||||
|
totalDuration: number
|
||||||
|
) => {
|
||||||
|
const [progress, setProgress] = useState(0);
|
||||||
|
const [buffered, setBuffered] = useState(0);
|
||||||
|
const [isTranscoding, setIsTranscoding] = useState(true);
|
||||||
|
const [quality, setQuality] = useState('720p');
|
||||||
|
|
||||||
|
const videoRef = useRef<HTMLVideoElement>(null);
|
||||||
|
const progressRef = useRef(0);
|
||||||
|
const lastUpdateRef = useRef(Date.now());
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const video = videoRef.current;
|
||||||
|
if (!video) return;
|
||||||
|
|
||||||
|
// Prevent backward jumps and smooth updates
|
||||||
|
const handleTimeUpdate = () => {
|
||||||
|
const now = Date.now();
|
||||||
|
if (now - lastUpdateRef.current < 100) return; // Throttle updates
|
||||||
|
|
||||||
|
const newProgress = video.currentTime / totalDuration;
|
||||||
|
|
||||||
|
// Only allow forward progress or small backward adjustments
|
||||||
|
if (newProgress >= progressRef.current - 0.005) {
|
||||||
|
progressRef.current = newProgress;
|
||||||
|
setProgress(newProgress);
|
||||||
|
lastUpdateRef.current = now;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Monitor transcoding completion
|
||||||
|
const checkTranscodingStatus = async () => {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/transcode/status?url=${encodeURIComponent(videoUrl)}`);
|
||||||
|
const data = await response.json();
|
||||||
|
setIsTranscoding(data.transcoding);
|
||||||
|
|
||||||
|
// Update total duration if changed
|
||||||
|
if (data.duration && Math.abs(data.duration - totalDuration) > 1) {
|
||||||
|
// Handle duration correction
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to check transcoding status:', error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Monitor buffer state
|
||||||
|
const handleBufferUpdate = () => {
|
||||||
|
if (video.buffered.length > 0) {
|
||||||
|
const bufferedEnd = video.buffered.end(video.buffered.length - 1);
|
||||||
|
setBuffered(bufferedEnd / totalDuration);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
video.addEventListener('timeupdate', handleTimeUpdate);
|
||||||
|
video.addEventListener('progress', handleBufferUpdate);
|
||||||
|
const interval = setInterval(checkTranscodingStatus, 2000);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
video.removeEventListener('timeupdate', handleTimeUpdate);
|
||||||
|
video.removeEventListener('progress', handleBufferUpdate);
|
||||||
|
clearInterval(interval);
|
||||||
|
};
|
||||||
|
}, [videoUrl, totalDuration]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
progress,
|
||||||
|
buffered,
|
||||||
|
isTranscoding,
|
||||||
|
videoRef,
|
||||||
|
quality,
|
||||||
|
setQuality
|
||||||
|
};
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Advanced Anti-Jitter Features
|
||||||
|
|
||||||
|
#### 1. Predictive Buffering
|
||||||
|
```typescript
|
||||||
|
// Predict next segment and pre-buffer
|
||||||
|
const usePredictiveBuffering = (
|
||||||
|
currentTime: number,
|
||||||
|
totalDuration: number,
|
||||||
|
playbackRate: number
|
||||||
|
) => {
|
||||||
|
const [nextSegment, setNextSegment] = useState<number | null>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const currentSegment = Math.floor(currentTime / 2); // 2-second segments
|
||||||
|
const bufferAhead = Math.ceil(playbackRate * 10); // 10 seconds ahead
|
||||||
|
const targetSegment = currentSegment + bufferAhead;
|
||||||
|
|
||||||
|
if (targetSegment * 2 < totalDuration) {
|
||||||
|
setNextSegment(targetSegment);
|
||||||
|
}
|
||||||
|
}, [currentTime, totalDuration, playbackRate]);
|
||||||
|
|
||||||
|
return nextSegment;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2. Quality Adaptation
|
||||||
|
```typescript
|
||||||
|
// Adapt quality based on buffer state
|
||||||
|
const useAdaptiveQuality = (
|
||||||
|
buffered: number,
|
||||||
|
progress: number,
|
||||||
|
bandwidth: number
|
||||||
|
) => {
|
||||||
|
const [quality, setQuality] = useState('720p');
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const bufferRatio = buffered - progress;
|
||||||
|
|
||||||
|
if (bufferRatio < 0.1) {
|
||||||
|
setQuality('480p'); // Lower quality for poor buffering
|
||||||
|
} else if (bandwidth > 5000000) {
|
||||||
|
setQuality('1080p'); // Higher quality for good bandwidth
|
||||||
|
} else {
|
||||||
|
setQuality('720p'); // Default quality
|
||||||
|
}
|
||||||
|
}, [buffered, progress, bandwidth]);
|
||||||
|
|
||||||
|
return quality;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Implementation Guidelines
|
||||||
|
|
||||||
|
### 1. Duration Accuracy
|
||||||
|
- Always extract duration from video stream metadata, not container
|
||||||
|
- Use ffprobe/ffmpeg for precise duration extraction
|
||||||
|
- Validate duration against actual transcoded segments
|
||||||
|
- Handle edge cases (variable frame rate, corrupted metadata)
|
||||||
|
|
||||||
|
### 2. Progress Synchronization
|
||||||
|
- Use segment-based streaming (HLS/DASH) for granular control
|
||||||
|
- Implement atomic segment generation (never overwrite existing)
|
||||||
|
- Provide real-time transcoding status to frontend
|
||||||
|
- Handle dynamic duration changes gracefully
|
||||||
|
|
||||||
|
### 3. Anti-Jitter Strategies
|
||||||
|
- **Segment Validation**: Only serve complete, validated segments
|
||||||
|
- **State Consistency**: Maintain consistent state between backend and frontend
|
||||||
|
- **Smooth Updates**: Debounce progress updates and prevent backward jumps
|
||||||
|
- **Buffer Awareness**: Track actual buffer state vs transcoding progress
|
||||||
|
|
||||||
|
### 4. Error Handling
|
||||||
|
- **Graceful Degradation**: Remove incomplete segments rather than serving corrupted data
|
||||||
|
- **Timeout Management**: 15-second timeout for segment generation
|
||||||
|
- **Fallback Strategies**: Multiple quality levels for connection issues
|
||||||
|
- **Recovery Mechanisms**: Restart failed segments without user intervention
|
||||||
|
|
||||||
|
This comprehensive approach ensures smooth, accurate progress bars even during live transcoding, eliminating the jittery behavior common in streaming applications.
|
||||||
|
|
@ -0,0 +1,887 @@
|
||||||
|
# Next.js Adaptation Guide - Complete Implementation
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This guide provides comprehensive strategies for implementing Stash's media streaming architecture in a Next.js application, covering process management, progress tracking, and anti-jitter mechanisms.
|
||||||
|
|
||||||
|
## Key Architecture Differences
|
||||||
|
|
||||||
|
| Stash (Go) | Next.js (Node.js) |
|
||||||
|
|------------|-------------------|
|
||||||
|
| `context.WithCancel` | `AbortController` |
|
||||||
|
| `http.Hijacker` | Request events (`close`, `aborted`) |
|
||||||
|
| `exec.Command` | `child_process.spawn` |
|
||||||
|
| `sync.WaitGroup` | `Promise.all` + process tracking |
|
||||||
|
| File locks | Process pool + cleanup |
|
||||||
|
| Graceful shutdown | `SIGTERM` handlers |
|
||||||
|
|
||||||
|
## 1. Core Process Management
|
||||||
|
|
||||||
|
### AbortController Pattern
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/abort-controller.ts
|
||||||
|
import { spawn, ChildProcess } from 'child_process';
|
||||||
|
|
||||||
|
export class TranscodeController {
|
||||||
|
private controller: AbortController;
|
||||||
|
private process?: ChildProcess;
|
||||||
|
private startTime: Date;
|
||||||
|
private filePath: string;
|
||||||
|
|
||||||
|
constructor(filePath: string) {
|
||||||
|
this.controller = new AbortController();
|
||||||
|
this.startTime = new Date();
|
||||||
|
this.filePath = filePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
async startTranscode(args: string[]): Promise<ChildProcess> {
|
||||||
|
const { signal } = this.controller;
|
||||||
|
|
||||||
|
this.process = spawn('ffmpeg', args, {
|
||||||
|
signal,
|
||||||
|
stdio: ['ignore', 'pipe', 'pipe'],
|
||||||
|
detached: false
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle cleanup on abort
|
||||||
|
signal.addEventListener('abort', () => {
|
||||||
|
this.cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
return this.process;
|
||||||
|
}
|
||||||
|
|
||||||
|
private cleanup() {
|
||||||
|
if (this.process && !this.process.killed) {
|
||||||
|
this.process.kill('SIGKILL');
|
||||||
|
this.process.unref();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
abort() {
|
||||||
|
this.controller.abort();
|
||||||
|
}
|
||||||
|
|
||||||
|
getUptime(): number {
|
||||||
|
return Date.now() - this.startTime.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
isRunning(): boolean {
|
||||||
|
return this.process ? !this.process.killed : false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Process Pool Management
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/process-pool.ts
|
||||||
|
import { ChildProcess } from 'child_process';
|
||||||
|
import { EventEmitter } from 'events';
|
||||||
|
|
||||||
|
interface ProcessEntry {
|
||||||
|
process: ChildProcess;
|
||||||
|
controller: TranscodeController;
|
||||||
|
startTime: Date;
|
||||||
|
filePath: string;
|
||||||
|
quality: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class TranscodeProcessPool extends EventEmitter {
|
||||||
|
private processes = new Map<string, ProcessEntry>();
|
||||||
|
private maxProcesses = parseInt(process.env.MAX_TRANSCODE_PROCESSES || '4');
|
||||||
|
|
||||||
|
add(
|
||||||
|
key: string,
|
||||||
|
process: ChildProcess,
|
||||||
|
controller: TranscodeController,
|
||||||
|
filePath: string,
|
||||||
|
quality: string
|
||||||
|
): boolean {
|
||||||
|
if (this.processes.size >= this.maxProcesses) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.processes.set(key, {
|
||||||
|
process,
|
||||||
|
controller,
|
||||||
|
startTime: new Date(),
|
||||||
|
filePath,
|
||||||
|
quality
|
||||||
|
});
|
||||||
|
|
||||||
|
// Auto cleanup
|
||||||
|
process.on('exit', () => {
|
||||||
|
this.processes.delete(key);
|
||||||
|
this.emit('process-exit', key);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.emit('process-added', key);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
kill(key: string): boolean {
|
||||||
|
const entry = this.processes.get(key);
|
||||||
|
if (entry) {
|
||||||
|
entry.controller.abort();
|
||||||
|
this.processes.delete(key);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
killAll(): void {
|
||||||
|
for (const [key, entry] of this.processes) {
|
||||||
|
entry.controller.abort();
|
||||||
|
}
|
||||||
|
this.processes.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
getActiveProcesses(): ProcessEntry[] {
|
||||||
|
return Array.from(this.processes.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
getStats() {
|
||||||
|
return {
|
||||||
|
active: this.processes.size,
|
||||||
|
max: this.maxProcesses,
|
||||||
|
uptime: Array.from(this.processes.values()).map(p => ({
|
||||||
|
filePath: p.filePath,
|
||||||
|
quality: p.quality,
|
||||||
|
uptime: Date.now() - p.startTime.getTime()
|
||||||
|
}))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const transcodePool = new TranscodeProcessPool();
|
||||||
|
```
|
||||||
|
|
||||||
|
## 2. API Route Implementation
|
||||||
|
|
||||||
|
### Transcode API Route
|
||||||
|
```typescript
|
||||||
|
// pages/api/transcode/[...slug].ts
|
||||||
|
import { NextApiRequest, NextApiResponse } from 'next';
|
||||||
|
import { spawn } from 'child_process';
|
||||||
|
import { TranscodeController } from '../../../lib/transcode/abort-controller';
|
||||||
|
import { transcodePool } from '../../../lib/transcode/process-pool';
|
||||||
|
|
||||||
|
export const config = {
|
||||||
|
runtime: 'nodejs',
|
||||||
|
api: {
|
||||||
|
responseLimit: '500mb',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default async function handler(
|
||||||
|
req: NextApiRequest,
|
||||||
|
res: NextApiResponse
|
||||||
|
) {
|
||||||
|
if (req.method !== 'GET') {
|
||||||
|
return res.status(405).json({ error: 'Method not allowed' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const { file, quality = '720p', start = '0' } = req.query;
|
||||||
|
|
||||||
|
if (!file || typeof file !== 'string') {
|
||||||
|
return res.status(400).json({ error: 'File parameter required' });
|
||||||
|
}
|
||||||
|
|
||||||
|
const startTime = parseFloat(start as string);
|
||||||
|
const key = `${file}:${quality}:${startTime}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Create transcode controller
|
||||||
|
const controller = new TranscodeController(file);
|
||||||
|
|
||||||
|
// Build ffmpeg arguments
|
||||||
|
const args = [
|
||||||
|
'-i', file,
|
||||||
|
'-ss', startTime.toString(),
|
||||||
|
'-c:v', 'libx264',
|
||||||
|
'-preset', 'fast',
|
||||||
|
'-crf', '23',
|
||||||
|
'-c:a', 'aac',
|
||||||
|
'-b:a', '128k',
|
||||||
|
'-f', 'mp4',
|
||||||
|
'-movflags', 'frag_keyframe+empty_moov',
|
||||||
|
'-max_muxing_queue_size', '9999',
|
||||||
|
'pipe:1'
|
||||||
|
];
|
||||||
|
|
||||||
|
// Handle client disconnect
|
||||||
|
req.on('close', () => {
|
||||||
|
controller.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
req.on('aborted', () => {
|
||||||
|
controller.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start transcoding
|
||||||
|
const process = await controller.startTranscode(args);
|
||||||
|
|
||||||
|
// Add to process pool
|
||||||
|
if (!transcodePool.add(key, process, controller, file, quality as string)) {
|
||||||
|
controller.abort();
|
||||||
|
return res.status(503).json({ error: 'Too many concurrent transcodes' });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set response headers
|
||||||
|
res.setHeader('Content-Type', 'video/mp4');
|
||||||
|
res.setHeader('Cache-Control', 'no-cache');
|
||||||
|
res.setHeader('Access-Control-Allow-Origin', '*');
|
||||||
|
|
||||||
|
// Handle range requests
|
||||||
|
const range = req.headers.range;
|
||||||
|
if (range) {
|
||||||
|
const parts = range.replace(/bytes=/, '').split('-');
|
||||||
|
const start = parseInt(parts[0], 10);
|
||||||
|
const end = parts[1] ? parseInt(parts[1], 10) : undefined;
|
||||||
|
|
||||||
|
res.status(206);
|
||||||
|
res.setHeader('Content-Range', `bytes ${start}-${end || '*'}/${'*'}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stream output
|
||||||
|
process.stdout.pipe(res);
|
||||||
|
|
||||||
|
// Handle errors
|
||||||
|
process.stderr.on('data', (data) => {
|
||||||
|
console.error(`FFmpeg error: ${data}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
process.on('error', (error) => {
|
||||||
|
console.error('Transcoding process error:', error);
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.status(500).json({ error: 'Transcoding failed' });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Transcode error:', error);
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.status(500).json({ error: 'Internal server error' });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### HLS Manifest API
|
||||||
|
```typescript
|
||||||
|
// pages/api/hls/[id].ts
|
||||||
|
import { NextApiRequest, NextApiResponse } from 'next';
|
||||||
|
import { extractVideoMetadata } from '../../../lib/video/metadata';
|
||||||
|
|
||||||
|
export default async function handler(
|
||||||
|
req: NextApiRequest,
|
||||||
|
res: NextApiResponse
|
||||||
|
) {
|
||||||
|
const { id, quality = '720p' } = req.query;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const metadata = await extractVideoMetadata(id as string);
|
||||||
|
const segmentDuration = 2; // 2-second segments
|
||||||
|
const totalSegments = Math.ceil(metadata.duration / segmentDuration);
|
||||||
|
|
||||||
|
const manifest = [
|
||||||
|
'#EXTM3U',
|
||||||
|
'#EXT-X-VERSION:3',
|
||||||
|
'#EXT-X-TARGETDURATION:2',
|
||||||
|
'#EXT-X-MEDIA-SEQUENCE:0',
|
||||||
|
...Array.from({ length: totalSegments }, (_, i) => [
|
||||||
|
`#EXTINF:${Math.min(segmentDuration, metadata.duration - i * segmentDuration).toFixed(3)},`,
|
||||||
|
`/api/hls/${id}/segment/${i}?quality=${quality}`
|
||||||
|
]).flat(),
|
||||||
|
'#EXT-X-ENDLIST'
|
||||||
|
].join('\n');
|
||||||
|
|
||||||
|
res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
|
||||||
|
res.setHeader('Cache-Control', 'no-cache');
|
||||||
|
res.send(manifest);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
res.status(500).json({ error: 'Failed to generate manifest' });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 3. Progress Tracking with Anti-Jitter
|
||||||
|
|
||||||
|
### React Hook Implementation
|
||||||
|
```typescript
|
||||||
|
// hooks/useTranscodeProgress.ts
|
||||||
|
import { useState, useEffect, useRef, useCallback } from 'react';
|
||||||
|
|
||||||
|
interface ProgressState {
|
||||||
|
progress: number;
|
||||||
|
buffered: number;
|
||||||
|
isTranscoding: boolean;
|
||||||
|
currentTime: number;
|
||||||
|
duration: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TranscodeProgressOptions {
|
||||||
|
debounceMs?: number;
|
||||||
|
enableSmoothing?: boolean;
|
||||||
|
maxJitter?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const useTranscodeProgress = (
|
||||||
|
videoUrl: string,
|
||||||
|
totalDuration: number,
|
||||||
|
options: TranscodeProgressOptions = {}
|
||||||
|
) => {
|
||||||
|
const {
|
||||||
|
debounceMs = 100,
|
||||||
|
enableSmoothing = true,
|
||||||
|
maxJitter = 0.01
|
||||||
|
} = options;
|
||||||
|
|
||||||
|
const [state, setState] = useState<ProgressState>({
|
||||||
|
progress: 0,
|
||||||
|
buffered: 0,
|
||||||
|
isTranscoding: true,
|
||||||
|
currentTime: 0,
|
||||||
|
duration: totalDuration
|
||||||
|
});
|
||||||
|
|
||||||
|
const videoRef = useRef<HTMLVideoElement>(null);
|
||||||
|
const lastProgressRef = useRef(0);
|
||||||
|
const lastUpdateRef = useRef(Date.now());
|
||||||
|
const abortControllerRef = useRef<AbortController | null>(null);
|
||||||
|
|
||||||
|
const debounce = useCallback((func: Function, wait: number) => {
|
||||||
|
let timeout: NodeJS.Timeout;
|
||||||
|
return (...args: any[]) => {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
timeout = setTimeout(() => func(...args), wait);
|
||||||
|
};
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const updateProgress = useCallback(debounce((newProgress: number) => {
|
||||||
|
const now = Date.now();
|
||||||
|
const timeSinceLastUpdate = now - lastUpdateRef.current;
|
||||||
|
|
||||||
|
// Anti-jitter: only update if significant change or time elapsed
|
||||||
|
const progressDiff = Math.abs(newProgress - lastProgressRef.current);
|
||||||
|
const shouldUpdate = progressDiff > maxJitter || timeSinceLastUpdate > 500;
|
||||||
|
|
||||||
|
if (shouldUpdate) {
|
||||||
|
lastProgressRef.current = newProgress;
|
||||||
|
lastUpdateRef.current = now;
|
||||||
|
|
||||||
|
setState(prev => ({
|
||||||
|
...prev,
|
||||||
|
progress: Math.min(newProgress, 1),
|
||||||
|
currentTime: newProgress * totalDuration
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}, debounceMs), [debounceMs, maxJitter, totalDuration]);
|
||||||
|
|
||||||
|
const updateBuffer = useCallback(debounce(() => {
|
||||||
|
const video = videoRef.current;
|
||||||
|
if (!video) return;
|
||||||
|
|
||||||
|
const buffered = video.buffered;
|
||||||
|
if (buffered.length > 0) {
|
||||||
|
const bufferedEnd = buffered.end(buffered.length - 1);
|
||||||
|
const bufferedRatio = bufferedEnd / totalDuration;
|
||||||
|
|
||||||
|
setState(prev => ({
|
||||||
|
...prev,
|
||||||
|
buffered: Math.min(bufferedRatio, 1)
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}, 250), [totalDuration]);
|
||||||
|
|
||||||
|
const checkTranscodingStatus = useCallback(async () => {
|
||||||
|
if (abortControllerRef.current) {
|
||||||
|
abortControllerRef.current.abort();
|
||||||
|
}
|
||||||
|
|
||||||
|
abortControllerRef.current = new AbortController();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/transcode/status?url=${encodeURIComponent(videoUrl)}`, {
|
||||||
|
signal: abortControllerRef.current.signal
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
setState(prev => ({
|
||||||
|
...prev,
|
||||||
|
isTranscoding: data.transcoding,
|
||||||
|
duration: data.duration || prev.duration
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (error.name !== 'AbortError') {
|
||||||
|
console.error('Failed to check transcoding status:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [videoUrl]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const video = videoRef.current;
|
||||||
|
if (!video) return;
|
||||||
|
|
||||||
|
const handleTimeUpdate = () => {
|
||||||
|
updateProgress(video.currentTime / totalDuration);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleProgress = () => {
|
||||||
|
updateBuffer();
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleLoadedMetadata = () => {
|
||||||
|
setState(prev => ({
|
||||||
|
...prev,
|
||||||
|
duration: video.duration || totalDuration
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
|
video.addEventListener('timeupdate', handleTimeUpdate);
|
||||||
|
video.addEventListener('progress', handleProgress);
|
||||||
|
video.addEventListener('loadedmetadata', handleLoadedMetadata);
|
||||||
|
|
||||||
|
const interval = setInterval(checkTranscodingStatus, 2000);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
video.removeEventListener('timeupdate', handleTimeUpdate);
|
||||||
|
video.removeEventListener('progress', handleProgress);
|
||||||
|
video.removeEventListener('loadedmetadata', handleLoadedMetadata);
|
||||||
|
clearInterval(interval);
|
||||||
|
|
||||||
|
if (abortControllerRef.current) {
|
||||||
|
abortControllerRef.current.abort();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}, [videoRef, totalDuration, updateProgress, updateBuffer, checkTranscodingStatus]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...state,
|
||||||
|
videoRef,
|
||||||
|
retry: checkTranscodingStatus
|
||||||
|
};
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## 4. Video Player Component
|
||||||
|
|
||||||
|
### Complete Player Implementation
|
||||||
|
```typescript
|
||||||
|
// components/VideoPlayer.tsx
|
||||||
|
import React, { useRef, useEffect } from 'react';
|
||||||
|
import { useTranscodeProgress } from '../hooks/useTranscodeProgress';
|
||||||
|
|
||||||
|
interface VideoPlayerProps {
|
||||||
|
src: string;
|
||||||
|
poster?: string;
|
||||||
|
autoPlay?: boolean;
|
||||||
|
className?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const VideoPlayer: React.FC<VideoPlayerProps> = ({
|
||||||
|
src,
|
||||||
|
poster,
|
||||||
|
autoPlay = false,
|
||||||
|
className
|
||||||
|
}) => {
|
||||||
|
const {
|
||||||
|
videoRef,
|
||||||
|
progress,
|
||||||
|
buffered,
|
||||||
|
isTranscoding,
|
||||||
|
currentTime,
|
||||||
|
duration,
|
||||||
|
retry
|
||||||
|
} = useTranscodeProgress(src, 0);
|
||||||
|
|
||||||
|
const handleSeek = (time: number) => {
|
||||||
|
if (videoRef.current) {
|
||||||
|
videoRef.current.currentTime = time;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const formatTime = (seconds: number) => {
|
||||||
|
const mins = Math.floor(seconds / 60);
|
||||||
|
const secs = Math.floor(seconds % 60);
|
||||||
|
return `${mins}:${secs.toString().padStart(2, '0')}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={`video-player ${className || ''}`}>
|
||||||
|
<video
|
||||||
|
ref={videoRef}
|
||||||
|
src={src}
|
||||||
|
poster={poster}
|
||||||
|
autoPlay={autoPlay}
|
||||||
|
controls
|
||||||
|
className="video-element"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{isTranscoding && (
|
||||||
|
<div className="transcoding-overlay">
|
||||||
|
<div className="transcoding-indicator">
|
||||||
|
<div className="spinner" />
|
||||||
|
<span>Transcoding...</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="progress-bar">
|
||||||
|
<div
|
||||||
|
className="progress-fill"
|
||||||
|
style={{ width: `${progress * 100}%` }}
|
||||||
|
/>
|
||||||
|
<div
|
||||||
|
className="buffer-fill"
|
||||||
|
style={{ width: `${buffered * 100}%` }}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="time-display">
|
||||||
|
<span>{formatTime(currentTime)} / {formatTime(duration)}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## 5. Health Monitoring
|
||||||
|
|
||||||
|
### Health Check Endpoint
|
||||||
|
```typescript
|
||||||
|
// pages/api/health.ts
|
||||||
|
import { NextApiRequest, NextApiResponse } from 'next';
|
||||||
|
import { transcodePool } from '../../lib/transcode/process-pool';
|
||||||
|
import { healthMonitor } from '../../lib/transcode/health-monitor';
|
||||||
|
|
||||||
|
export default function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||||
|
const stats = transcodePool.getStats();
|
||||||
|
const memoryUsage = process.memoryUsage();
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
status: 'healthy',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
processes: stats,
|
||||||
|
memory: {
|
||||||
|
used: memoryUsage.heapUsed,
|
||||||
|
total: memoryUsage.heapTotal,
|
||||||
|
external: memoryUsage.external
|
||||||
|
},
|
||||||
|
uptime: process.uptime()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Health Monitor Service
|
||||||
|
```typescript
|
||||||
|
// lib/transcode/health-monitor.ts
|
||||||
|
import { transcodePool } from './process-pool';
|
||||||
|
|
||||||
|
export class HealthMonitor {
|
||||||
|
private interval?: NodeJS.Timeout;
|
||||||
|
private maxMemoryUsage = 0.9; // 90%
|
||||||
|
private maxProcessUptime = 300000; // 5 minutes
|
||||||
|
|
||||||
|
start() {
|
||||||
|
this.interval = setInterval(() => {
|
||||||
|
this.checkMemoryUsage();
|
||||||
|
this.checkProcessTimeouts();
|
||||||
|
this.logStats();
|
||||||
|
}, 30000);
|
||||||
|
}
|
||||||
|
|
||||||
|
stop() {
|
||||||
|
if (this.interval) {
|
||||||
|
clearInterval(this.interval);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private checkMemoryUsage() {
|
||||||
|
const memUsage = process.memoryUsage();
|
||||||
|
const ratio = memUsage.heapUsed / memUsage.heapTotal;
|
||||||
|
|
||||||
|
if (ratio > this.maxMemoryUsage) {
|
||||||
|
console.warn('High memory usage detected, cleaning up old processes');
|
||||||
|
this.cleanupOldProcesses();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private checkProcessTimeouts() {
|
||||||
|
const processes = transcodePool.getActiveProcesses();
|
||||||
|
|
||||||
|
processes.forEach(p => {
|
||||||
|
if (p.uptime > this.maxProcessUptime) {
|
||||||
|
console.warn(`Killing hanging process: ${p.filePath}`);
|
||||||
|
transcodePool.kill(`${p.filePath}:${p.quality}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private logStats() {
|
||||||
|
const stats = transcodePool.getStats();
|
||||||
|
console.log('Transcode stats:', {
|
||||||
|
active: stats.active,
|
||||||
|
uptime: stats.uptime
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private cleanupOldProcesses() {
|
||||||
|
const processes = transcodePool.getActiveProcesses();
|
||||||
|
const sorted = processes.sort((a, b) => a.uptime - b.uptime);
|
||||||
|
|
||||||
|
// Keep newest half, kill oldest half
|
||||||
|
const toKeep = Math.floor(sorted.length / 2);
|
||||||
|
sorted.slice(0, toKeep).forEach(p => {
|
||||||
|
transcodePool.kill(`${p.filePath}:${p.quality}`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const healthMonitor = new HealthMonitor();
|
||||||
|
```
|
||||||
|
|
||||||
|
## 6. Production Deployment
|
||||||
|
|
||||||
|
### PM2 Configuration
|
||||||
|
```javascript
|
||||||
|
// ecosystem.config.js
|
||||||
|
module.exports = {
|
||||||
|
apps: [{
|
||||||
|
name: 'nextjs-transcode',
|
||||||
|
script: 'node_modules/.bin/next',
|
||||||
|
args: 'start',
|
||||||
|
instances: 'max',
|
||||||
|
exec_mode: 'cluster',
|
||||||
|
max_memory_restart: '1G',
|
||||||
|
env: {
|
||||||
|
NODE_ENV: 'production',
|
||||||
|
PORT: 3000,
|
||||||
|
MAX_TRANSCODE_PROCESSES: '4',
|
||||||
|
TRANSCODE_TIMEOUT: '300000'
|
||||||
|
},
|
||||||
|
kill_timeout: 10000,
|
||||||
|
shutdown_with_message: true
|
||||||
|
}]
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Configuration
|
||||||
|
```dockerfile
|
||||||
|
FROM node:18-alpine
|
||||||
|
|
||||||
|
# Install FFmpeg
|
||||||
|
RUN apk add --no-cache ffmpeg
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy package files
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci --only=production
|
||||||
|
|
||||||
|
# Copy application code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build the application
|
||||||
|
RUN npm run build
|
||||||
|
|
||||||
|
# Set resource limits
|
||||||
|
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
||||||
|
ENV MAX_TRANSCODE_PROCESSES="4"
|
||||||
|
|
||||||
|
EXPOSE 3000
|
||||||
|
|
||||||
|
CMD ["npm", "start"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
```bash
|
||||||
|
# .env.production
|
||||||
|
NODE_ENV=production
|
||||||
|
MAX_TRANSCODE_PROCESSES=4
|
||||||
|
TRANSCODE_TIMEOUT=300000
|
||||||
|
FFMPEG_PATH=/usr/bin/ffmpeg
|
||||||
|
MAX_MEMORY_USAGE=0.9
|
||||||
|
HEALTH_CHECK_INTERVAL=30000
|
||||||
|
```
|
||||||
|
|
||||||
|
### Kubernetes Deployment
|
||||||
|
```yaml
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: nextjs-transcode
|
||||||
|
spec:
|
||||||
|
replicas: 3
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: nextjs-transcode
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: nextjs-transcode
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: nextjs
|
||||||
|
image: nextjs-transcode:latest
|
||||||
|
ports:
|
||||||
|
- containerPort: 3000
|
||||||
|
env:
|
||||||
|
- name: NODE_ENV
|
||||||
|
value: "production"
|
||||||
|
- name: MAX_TRANSCODE_PROCESSES
|
||||||
|
value: "2"
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
memory: "512Mi"
|
||||||
|
cpu: "500m"
|
||||||
|
limits:
|
||||||
|
memory: "2Gi"
|
||||||
|
cpu: "1000m"
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /api/health
|
||||||
|
port: 3000
|
||||||
|
initialDelaySeconds: 30
|
||||||
|
periodSeconds: 10
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /api/health
|
||||||
|
port: 3000
|
||||||
|
initialDelaySeconds: 5
|
||||||
|
periodSeconds: 5
|
||||||
|
```
|
||||||
|
|
||||||
|
## 7. Testing Strategy
|
||||||
|
|
||||||
|
### Unit Tests
|
||||||
|
```typescript
|
||||||
|
// __tests__/transcode-controller.test.ts
|
||||||
|
import { TranscodeController } from '../lib/transcode/abort-controller';
|
||||||
|
|
||||||
|
describe('TranscodeController', () => {
|
||||||
|
it('should start and abort transcoding', async () => {
|
||||||
|
const controller = new TranscodeController('test.mp4');
|
||||||
|
const process = await controller.startTranscode(['-i', 'test.mp4', '-f', 'null', '-']);
|
||||||
|
|
||||||
|
expect(controller.isRunning()).toBe(true);
|
||||||
|
|
||||||
|
controller.abort();
|
||||||
|
|
||||||
|
// Wait for process to exit
|
||||||
|
await new Promise(resolve => process.on('exit', resolve));
|
||||||
|
|
||||||
|
expect(controller.isRunning()).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle process timeout', async () => {
|
||||||
|
const controller = new TranscodeController('test.mp4');
|
||||||
|
|
||||||
|
// Test with invalid file to force quick exit
|
||||||
|
const process = await controller.startTranscode(['-i', 'nonexistent.mp4', '-f', 'null', '-']);
|
||||||
|
|
||||||
|
const exitCode = await new Promise(resolve => process.on('exit', resolve));
|
||||||
|
expect(exitCode).toBe(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Integration Tests
|
||||||
|
```typescript
|
||||||
|
// __tests__/api-transcode.test.ts
|
||||||
|
import { createServer } from 'http';
|
||||||
|
import request from 'supertest';
|
||||||
|
import { parse } from 'url';
|
||||||
|
import next from 'next';
|
||||||
|
|
||||||
|
describe('/api/transcode', () => {
|
||||||
|
let server: any;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
const app = next({ dev: false });
|
||||||
|
await app.prepare();
|
||||||
|
|
||||||
|
server = createServer((req, res) => {
|
||||||
|
const parsedUrl = parse(req.url, true);
|
||||||
|
app.getRequestHandler()(req, res, parsedUrl);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle transcode requests', async () => {
|
||||||
|
const response = await request(server)
|
||||||
|
.get('/api/transcode/test.mp4')
|
||||||
|
.expect(200);
|
||||||
|
|
||||||
|
expect(response.headers['content-type']).toBe('video/mp4');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle client disconnect', async () => {
|
||||||
|
const response = await request(server)
|
||||||
|
.get('/api/transcode/test.mp4')
|
||||||
|
.timeout(1000); // Force timeout
|
||||||
|
|
||||||
|
expect(response.status).toBe(408);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## 8. Performance Optimization
|
||||||
|
|
||||||
|
### Caching Strategy
|
||||||
|
```typescript
|
||||||
|
// lib/cache/transcode-cache.ts
|
||||||
|
import NodeCache from 'node-cache';
|
||||||
|
|
||||||
|
export class TranscodeCache {
|
||||||
|
private cache = new NodeCache({
|
||||||
|
stdTTL: 3600, // 1 hour
|
||||||
|
checkperiod: 600, // 10 minutes
|
||||||
|
maxKeys: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
get(key: string): Buffer | undefined {
|
||||||
|
return this.cache.get(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
set(key: string, data: Buffer, ttl?: number): boolean {
|
||||||
|
return this.cache.set(key, data, ttl);
|
||||||
|
}
|
||||||
|
|
||||||
|
has(key: string): boolean {
|
||||||
|
return this.cache.has(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
delete(key: string): number {
|
||||||
|
return this.cache.del(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
getStats() {
|
||||||
|
return this.cache.getStats();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const transcodeCache = new TranscodeCache();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Rate Limiting
|
||||||
|
```typescript
|
||||||
|
// lib/middleware/rate-limit.ts
|
||||||
|
import rateLimit from 'express-rate-limit';
|
||||||
|
|
||||||
|
export const transcodeRateLimit = rateLimit({
|
||||||
|
windowMs: 15 * 60 * 1000, // 15 minutes
|
||||||
|
max: 100, // Limit each IP to 100 requests per windowMs
|
||||||
|
message: 'Too many transcode requests',
|
||||||
|
standardHeaders: true,
|
||||||
|
legacyHeaders: false,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
This comprehensive Next.js adaptation provides all the components needed to implement Stash's reliable transcoding system with modern Node.js patterns and Next.js optimizations.
|
||||||
|
Before Width: | Height: | Size: 82 KiB |
|
Before Width: | Height: | Size: 115 KiB |
|
Before Width: | Height: | Size: 36 KiB |
|
Before Width: | Height: | Size: 52 KiB |
|
Before Width: | Height: | Size: 162 KiB |
|
Before Width: | Height: | Size: 68 KiB |
|
Before Width: | Height: | Size: 103 KiB |
|
Before Width: | Height: | Size: 139 KiB |
|
Before Width: | Height: | Size: 120 KiB |
|
Before Width: | Height: | Size: 133 KiB |
|
Before Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 83 KiB |
|
Before Width: | Height: | Size: 152 KiB |
|
Before Width: | Height: | Size: 46 KiB |
|
Before Width: | Height: | Size: 127 KiB |
|
Before Width: | Height: | Size: 101 KiB |
|
Before Width: | Height: | Size: 148 KiB |
|
Before Width: | Height: | Size: 99 KiB |
|
Before Width: | Height: | Size: 60 KiB |
|
Before Width: | Height: | Size: 112 KiB |
|
Before Width: | Height: | Size: 176 KiB |
|
Before Width: | Height: | Size: 62 KiB |
|
Before Width: | Height: | Size: 443 B |
|
Before Width: | Height: | Size: 73 KiB |
|
Before Width: | Height: | Size: 46 KiB |
|
Before Width: | Height: | Size: 57 KiB |
|
Before Width: | Height: | Size: 132 KiB |
|
Before Width: | Height: | Size: 95 KiB |
|
Before Width: | Height: | Size: 84 KiB |
|
Before Width: | Height: | Size: 30 KiB |
|
Before Width: | Height: | Size: 100 KiB |
|
Before Width: | Height: | Size: 137 KiB |
|
Before Width: | Height: | Size: 148 KiB |
|
Before Width: | Height: | Size: 78 KiB |
|
Before Width: | Height: | Size: 150 KiB |
|
Before Width: | Height: | Size: 130 KiB |
|
Before Width: | Height: | Size: 92 KiB |
|
Before Width: | Height: | Size: 101 KiB |
|
Before Width: | Height: | Size: 95 KiB |
|
Before Width: | Height: | Size: 118 KiB |
|
Before Width: | Height: | Size: 119 KiB |
|
Before Width: | Height: | Size: 383 B |
|
Before Width: | Height: | Size: 56 KiB |
|
Before Width: | Height: | Size: 14 KiB |
|
Before Width: | Height: | Size: 127 KiB |
|
Before Width: | Height: | Size: 129 KiB |
|
Before Width: | Height: | Size: 103 KiB |
|
Before Width: | Height: | Size: 96 KiB |
|
Before Width: | Height: | Size: 52 KiB |