refactor: fix biome linting issues and update project documentation

- Fix 36+ biome linting issues, reducing errors/warnings from 227 to 191
- Replace explicit 'any' types with proper TypeScript interfaces
- Fix React hooks dependencies and useCallback patterns
- Resolve unused variables and parameter assignment issues
- Improve accessibility with proper label associations
- Add comprehensive API documentation for admin and security features
- Update README.md with accurate PostgreSQL setup and current tech stack
- Create complete documentation for audit logging, CSP monitoring, and batch processing
- Fix outdated project information and document missing developer workflows
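
For illustration, the `any`-to-interface and hook-dependency fixes follow the shape sketched below; the component, names, and types are hypothetical, not taken from this diff:

import { useCallback } from "react";

// Hypothetical item type standing in for a former explicit `any`
interface SessionItem {
  id: string;
  label: string;
}

interface SessionPickerProps {
  items: SessionItem[];
  onSelect: (item: SessionItem) => void;
}

export function SessionPicker({ items, onSelect }: SessionPickerProps) {
  // Before: useCallback((item: any) => onSelect(item), [])
  // After: a typed parameter and a complete dependency list, satisfying
  // biome's noExplicitAny and useExhaustiveDependencies rules
  const handleSelect = useCallback(
    (item: SessionItem) => onSelect(item),
    [onSelect]
  );

  return (
    <ul>
      {items.map((item) => (
        <li key={item.id}>
          <button type="button" onClick={() => handleSelect(item)}>
            {item.label}
          </button>
        </li>
      ))}
    </ul>
  );
}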
commit 1eea2cc3e4
parent 3e9e75e854
Date: 2025-07-11 21:50:53 +02:00
Committed by: Kaj Kowalski
121 changed files with 28687 additions and 4895 deletions

View File

@@ -0,0 +1,390 @@
#!/usr/bin/env tsx
import {
buildCSP,
validateCSP,
testCSPImplementation,
generateNonce,
detectCSPBypass,
} from "../lib/csp";
interface CSPValidationResult {
configuration: string;
csp: string;
validation: ReturnType<typeof validateCSP>;
implementation: ReturnType<typeof testCSPImplementation>;
nonce?: string;
}
class EnhancedCSPValidator {
private results: CSPValidationResult[] = [];
async validateAllConfigurations() {
console.log("🔒 Enhanced CSP Validation Suite");
console.log("================================\n");
// Test configurations
const configurations = [
{
name: "Development (Permissive)",
config: { isDevelopment: true, reportUri: "/api/csp-report" },
},
{
name: "Production (Standard)",
config: {
isDevelopment: false,
nonce: generateNonce(),
reportUri: "/api/csp-report",
strictMode: false,
},
},
{
name: "Production (Strict Mode)",
config: {
isDevelopment: false,
nonce: generateNonce(),
reportUri: "/api/csp-report",
strictMode: true,
allowedExternalDomains: [
"https://api.openai.com",
"https://livedash.notso.ai",
],
},
},
{
name: "Production (Maximum Security)",
config: {
isDevelopment: false,
nonce: generateNonce(),
reportUri: "/api/csp-report",
strictMode: true,
allowedExternalDomains: ["https://api.openai.com"],
reportingLevel: "all" as const,
},
},
];
for (const { name, config } of configurations) {
await this.validateConfiguration(name, config);
}
this.generateReport();
await this.testBypassDetection();
await this.testRealWorldScenarios();
}
private async validateConfiguration(
name: string,
config: Parameters<typeof buildCSP>[0]
) {
console.log(`🧪 Testing ${name}...`);
const csp = buildCSP(config);
const validation = validateCSP(csp, { strictMode: config.strictMode });
const implementation = testCSPImplementation(csp);
this.results.push({
configuration: name,
csp,
validation,
implementation,
nonce: config.nonce,
});
// Short summary
const emoji =
validation.securityScore >= 90
? "🟢"
: validation.securityScore >= 70
? "🟡"
: "🔴";
console.log(` ${emoji} Security Score: ${validation.securityScore}%`);
console.log(` 📊 Implementation Score: ${implementation.overallScore}%`);
if (validation.errors.length > 0) {
console.log(` ❌ Errors: ${validation.errors.length}`);
}
if (validation.warnings.length > 0) {
console.log(` ⚠️ Warnings: ${validation.warnings.length}`);
}
console.log();
}
private generateReport() {
console.log("📋 Detailed Validation Report");
console.log("============================\n");
for (const result of this.results) {
console.log(`📌 ${result.configuration}`);
console.log("-".repeat(result.configuration.length + 2));
// CSP Policy
console.log(`\nCSP Policy (${result.csp.length} chars):`);
console.log(
`${result.csp.substring(0, 120)}${result.csp.length > 120 ? "..." : ""}\n`
);
// Security Analysis
console.log("🛡️ Security Analysis:");
console.log(` Score: ${result.validation.securityScore}%`);
if (result.validation.errors.length > 0) {
console.log(` Errors:`);
for (const error of result.validation.errors) {
console.log(` ❌ ${error}`);
}
}
if (result.validation.warnings.length > 0) {
console.log(` Warnings:`);
for (const warning of result.validation.warnings) {
console.log(` ⚠️ ${warning}`);
}
}
if (result.validation.recommendations.length > 0) {
console.log(` Recommendations:`);
for (const rec of result.validation.recommendations) {
console.log(` 💡 ${rec}`);
}
}
// Implementation Tests
console.log("\n🧪 Implementation Tests:");
for (const test of result.implementation.testResults) {
const emoji = test.passed ? "✅" : "❌";
console.log(` ${emoji} ${test.name}: ${test.description}`);
if (test.recommendation) {
console.log(` 💡 ${test.recommendation}`);
}
}
console.log(
` Overall Implementation Score: ${result.implementation.overallScore}%\n`
);
console.log();
}
}
private async testBypassDetection() {
console.log("🕵️ CSP Bypass Detection Tests");
console.log("==============================\n");
const bypassAttempts = [
{
name: "JavaScript Protocol",
content: "<a href='javascript:alert(1)'>Click</a>",
expectedRisk: "high",
},
{
name: "Data URI Script",
content: "<script src='data:text/javascript,alert(1)'></script>",
expectedRisk: "high",
},
{
name: "Eval Injection",
content: "eval('alert(1)')",
expectedRisk: "high",
},
{
name: "Function Constructor",
content: "new Function('alert(1)')()",
expectedRisk: "high",
},
{
name: "setTimeout String",
content: "setTimeout('alert(1)', 1000)",
expectedRisk: "medium",
},
{
name: "JSONP Callback",
content: "callback=<script>alert(1)</script>",
expectedRisk: "medium",
},
{
name: "Safe Content",
content: "const x = document.getElementById('safe');",
expectedRisk: "low",
},
];
let detectionTests = 0;
let passedDetections = 0;
for (const attempt of bypassAttempts) {
const detection = detectCSPBypass(attempt.content);
const testPassed =
detection.isDetected === (attempt.expectedRisk !== "low");
detectionTests++;
if (testPassed) passedDetections++;
const emoji = testPassed ? "✅" : "❌";
const riskEmoji =
detection.riskLevel === "high"
? "🚨"
: detection.riskLevel === "medium"
? "⚠️"
: "🟢";
console.log(`${emoji} ${attempt.name}`);
console.log(
` Content: ${attempt.content.substring(0, 50)}${attempt.content.length > 50 ? "..." : ""}`
);
console.log(
` ${riskEmoji} Risk Level: ${detection.riskLevel} (expected: ${attempt.expectedRisk})`
);
console.log(` Detected: ${detection.isDetected}`);
if (detection.patterns.length > 0) {
console.log(` Patterns: ${detection.patterns.length} matched`);
}
console.log();
}
console.log(
`🎯 Bypass Detection Score: ${Math.round((passedDetections / detectionTests) * 100)}%\n`
);
}
private async testRealWorldScenarios() {
console.log("🌍 Real-World Scenario Tests");
console.log("============================\n");
const scenarios = [
{
name: "Leaflet Maps Integration",
sources: [
"https://unpkg.com/leaflet@1.9.4/dist/leaflet.css",
"https://tile.openstreetmap.org/{z}/{x}/{y}.png",
"https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.9.4/leaflet.js",
],
test: (csp: string) => {
return (
csp.includes("https://*.openstreetmap.org") ||
csp.includes("https://tile.openstreetmap.org") ||
csp.includes("https:")
);
},
},
{
name: "OpenAI API Integration",
sources: [
"https://api.openai.com/v1/chat/completions",
"https://api.openai.com/v1/files",
],
test: (csp: string) => {
return (
csp.includes("https://api.openai.com") || csp.includes("https:")
);
},
},
{
name: "Schema.org Structured Data",
sources: ["https://schema.org/SoftwareApplication"],
test: (csp: string) => {
return csp.includes("https://schema.org") || csp.includes("https:");
},
},
{
name: "WebSocket Development (HMR)",
sources: [
"ws://localhost:3000/_next/webpack-hmr",
"wss://localhost:3000/_next/webpack-hmr",
],
test: (csp: string) => {
return csp.includes("ws:") || csp.includes("wss:");
},
},
];
for (const scenario of scenarios) {
console.log(`🧪 ${scenario.name}`);
// Test with production strict mode
const productionCSP = buildCSP({
isDevelopment: false,
nonce: generateNonce(),
strictMode: true,
allowedExternalDomains: [
"https://api.openai.com",
"https://schema.org",
],
});
// Test with development mode
const devCSP = buildCSP({
isDevelopment: true,
reportUri: "/api/csp-report",
});
const prodSupport = scenario.test(productionCSP);
const devSupport = scenario.test(devCSP);
console.log(
` Production (Strict): ${prodSupport ? "✅ Supported" : "❌ Blocked"}`
);
console.log(
` Development: ${devSupport ? "✅ Supported" : "❌ Blocked"}`
);
if (!prodSupport && scenario.name !== "WebSocket Development (HMR)") {
console.log(` 💡 May need to add domains to allowedExternalDomains`);
}
console.log(` Required sources: ${scenario.sources.length}`);
for (const source of scenario.sources.slice(0, 2)) {
console.log(` - ${source}`);
}
if (scenario.sources.length > 2) {
console.log(` ... and ${scenario.sources.length - 2} more`);
}
console.log();
}
}
async run() {
try {
await this.validateAllConfigurations();
// Final summary
const scores = this.results.map((r) => r.validation.securityScore);
const avgScore = Math.round(
scores.reduce((a, b) => a + b, 0) / scores.length
);
console.log("🎯 Final Assessment");
console.log("==================");
console.log(`Average Security Score: ${avgScore}%`);
if (avgScore >= 95) {
console.log(
"🏆 Excellent CSP implementation! Industry-leading security."
);
} else if (avgScore >= 85) {
console.log("🥇 Very good CSP implementation with strong security.");
} else if (avgScore >= 70) {
console.log("🥈 Good CSP implementation with room for improvement.");
} else {
console.log(
"🥉 CSP implementation needs significant security improvements."
);
}
console.log("\n💡 General Recommendations:");
console.log("- Test CSP changes in development before deploying");
console.log("- Monitor CSP violation reports regularly");
console.log("- Review and update CSP policies quarterly");
console.log("- Use strict mode in production environments");
console.log("- Keep allowed external domains to minimum necessary");
} catch (error) {
console.error("❌ Validation failed:", error);
process.exit(1);
}
}
}
// Run validation if script is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
const validator = new EnhancedCSPValidator();
validator.run();
}
export default EnhancedCSPValidator;

View File

@@ -5,26 +5,31 @@ This directory contains comprehensive migration scripts for deploying the new ar
## Migration Components
### 1. Database Migrations
- `01-schema-migrations.sql` - Prisma database schema migrations
- `02-data-migrations.sql` - Data transformation scripts
- `validate-database.ts` - Database validation and health checks
### 2. Environment Configuration
- `environment-migration.ts` - Environment variable migration guide
- `config-validator.ts` - Configuration validation scripts
### 3. Deployment Scripts
- `deploy.ts` - Main deployment orchestrator
- `pre-deployment-checks.ts` - Pre-deployment validation
- `post-deployment-validation.ts` - Post-deployment verification
- `rollback.ts` - Rollback procedures
### 4. Health Checks
- `health-checks.ts` - Comprehensive system health validation
- `trpc-endpoint-tests.ts` - tRPC endpoint validation
- `batch-processing-tests.ts` - Batch processing system tests
### 5. Migration Utilities
- `backup-database.ts` - Database backup procedures
- `restore-database.ts` - Database restore procedures
- `migration-logger.ts` - Migration logging utilities (see the sketch below)
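
The scripts in this directory call `migration-logger.ts` throughout. Inferred from those call sites, the logger surface looks roughly like the sketch below; this is a reconstruction, not the actual file:

// Approximate shape of migrationLogger, reconstructed from its usage in this commit
export interface MigrationLogger {
  debug(step: string, message: string, meta?: Record<string, unknown>): void;
  info(step: string, message: string, meta?: Record<string, unknown>): void;
  warn(step: string, message: string, meta?: Record<string, unknown>): void;
  error(
    step: string,
    message: string,
    error?: Error,
    meta?: Record<string, unknown>
  ): void;
  startPhase(phase: string, description: string): void;
  startStep(step: string, description: string): void;
  completeStep(step: string): void;
  failStep(step: string, error: Error): void;
}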
@@ -32,21 +37,25 @@ This directory contains comprehensive migration scripts for deploying the new ar
## Usage
### Pre-Migration
1. Run database backup: `pnpm migration:backup`
2. Validate environment: `pnpm migration:validate-env`
3. Run pre-deployment checks: `pnpm migration:pre-check`
### Migration
1. Run schema migrations: `pnpm migration:schema`
2. Run data migrations: `pnpm migration:data`
3. Deploy application: `pnpm migration:deploy`
### Post-Migration
1. Validate deployment: `pnpm migration:validate`
2. Run health checks: `pnpm migration:health-check`
3. Test critical paths: `pnpm migration:test`
### Rollback (if needed)
1. Rollback deployment: `pnpm migration:rollback`
2. Restore database: `pnpm migration:restore`
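
The order of these steps matters. As a hedged sketch (a hypothetical convenience wrapper, not part of this commit), the full sequence can be scripted so that the first failing step halts the run:

import { execSync } from "node:child_process";

// Run the documented migration sequence in order; execSync throws on a
// non-zero exit code, so a failing step stops everything after it.
const steps = [
  "pnpm migration:backup",
  "pnpm migration:validate-env",
  "pnpm migration:pre-check",
  "pnpm migration:schema",
  "pnpm migration:data",
  "pnpm migration:deploy",
  "pnpm migration:validate",
  "pnpm migration:health-check",
  "pnpm migration:test",
];

for (const step of steps) {
  console.log(`▶ ${step}`);
  execSync(step, { stdio: "inherit" });
}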
@@ -90,4 +99,4 @@ The migration implements a blue-green deployment strategy:
- Health checks at each stage
- Progressive feature enablement
- Comprehensive logging and monitoring
- Backwards compatibility maintained during migration
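
To make the health-check gating concrete, here is a condensed sketch. `HealthChecker.runHealthChecks()` returning `{ success, errors }` matches its usage in `deploy.ts` below; the gate function and the import path are assumptions:

import { HealthChecker } from "./health-checks";

// Gate a blue-green stage: traffic shifts only if health checks pass.
// A thrown error aborts the rollout so rollback.ts can restore the
// previous (blue) environment.
async function gateStage(stage: string): Promise<void> {
  const result = await new HealthChecker().runHealthChecks();
  if (!result.success) {
    throw new Error(`${stage} blocked: ${result.errors.join(", ")}`);
  }
}

await gateStage("green cutover");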

View File

@@ -90,13 +90,12 @@ export class DatabaseBackup {
migrationLogger.info("DATABASE_BACKUP", "Backup completed successfully", {
path: finalPath,
sizeBytes: stats.size,
sizeMB: Math.round(stats.size / 1024 / 1024 * 100) / 100,
sizeMB: Math.round((stats.size / 1024 / 1024) * 100) / 100,
duration,
checksum: checksumMD5,
});
return result;
} catch (error) {
const duration = Date.now() - startTime;
migrationLogger.failStep("DATABASE_BACKUP", error as Error);
@@ -136,13 +135,15 @@ export class DatabaseBackup {
/**
* List existing backups with metadata
*/
async listBackups(backupDir?: string): Promise<Array<{
filename: string;
path: string;
size: number;
created: Date;
type: string;
}>> {
async listBackups(backupDir?: string): Promise<
Array<{
filename: string;
path: string;
size: number;
created: Date;
type: string;
}>
> {
const dir = backupDir || this.defaultOptions.outputDir;
if (!existsSync(dir)) {
@@ -150,11 +151,13 @@ export class DatabaseBackup {
}
try {
const files = await import("node:fs/promises").then(fs => fs.readdir(dir));
const files = await import("node:fs/promises").then((fs) =>
fs.readdir(dir)
);
const backups = [];
for (const file of files) {
if (file.endsWith('.sql') || file.endsWith('.sql.gz')) {
if (file.endsWith(".sql") || file.endsWith(".sql.gz")) {
const fullPath = join(dir, file);
const stats = statSync(fullPath);
@@ -174,9 +177,10 @@ export class DatabaseBackup {
}
return backups.sort((a, b) => b.created.getTime() - a.created.getTime());
} catch (error) {
migrationLogger.warn("BACKUP_LIST", "Failed to list backups", { error: (error as Error).message });
migrationLogger.warn("BACKUP_LIST", "Failed to list backups", {
error: (error as Error).message,
});
return [];
}
}
@@ -216,11 +220,16 @@ export class DatabaseBackup {
): Promise<void> {
return new Promise((resolve, reject) => {
const args = [
"-h", dbConfig.host,
"-p", dbConfig.port,
"-U", dbConfig.username,
"-d", dbConfig.database,
"-f", outputPath,
"-h",
dbConfig.host,
"-p",
dbConfig.port,
"-U",
dbConfig.username,
"-d",
dbConfig.database,
"-f",
outputPath,
"--verbose",
];
@@ -234,15 +243,17 @@ export class DatabaseBackup {
// Additional options for better backup quality
args.push(
"--create", // Include CREATE DATABASE
"--clean", // Include DROP statements
"--if-exists", // Use IF EXISTS
"--disable-triggers", // Disable triggers during restore
"--no-owner", // Don't output ownership commands
"--no-privileges" // Don't output privilege commands
"--create", // Include CREATE DATABASE
"--clean", // Include DROP statements
"--if-exists", // Use IF EXISTS
"--disable-triggers", // Disable triggers during restore
"--no-owner", // Don't output ownership commands
"--no-privileges" // Don't output privilege commands
);
migrationLogger.debug("PG_DUMP", "Starting pg_dump", { args: args.filter(arg => arg !== dbConfig.password) });
migrationLogger.debug("PG_DUMP", "Starting pg_dump", {
args: args.filter((arg) => arg !== dbConfig.password),
});
const process = spawn("pg_dump", args, {
env: {
@@ -278,7 +289,10 @@ export class DatabaseBackup {
});
}
private async compressBackup(sourcePath: string, targetPath: string): Promise<void> {
private async compressBackup(
sourcePath: string,
targetPath: string
): Promise<void> {
return new Promise((resolve, reject) => {
const fs = require("node:fs");
const readStream = fs.createReadStream(sourcePath);
@@ -291,7 +305,10 @@ export class DatabaseBackup {
.on("finish", () => {
// Remove uncompressed file
fs.unlinkSync(sourcePath);
migrationLogger.debug("COMPRESSION", `Compressed backup: ${targetPath}`);
migrationLogger.debug(
"COMPRESSION",
`Compressed backup: ${targetPath}`
);
resolve();
})
.on("error", reject);
@@ -311,14 +328,18 @@ export class DatabaseBackup {
stream.on("data", (data) => hash.update(data));
stream.on("end", () => {
const checksum = hash.digest("hex");
migrationLogger.debug("BACKUP_VERIFICATION", `Backup checksum: ${checksum}`);
migrationLogger.debug(
"BACKUP_VERIFICATION",
`Backup checksum: ${checksum}`
);
resolve(checksum);
});
stream.on("error", reject);
});
} catch (error) {
migrationLogger.warn("BACKUP_VERIFICATION", "Failed to verify backup", { error: (error as Error).message });
migrationLogger.warn("BACKUP_VERIFICATION", "Failed to verify backup", {
error: (error as Error).message,
});
throw error;
}
}
@@ -326,28 +347,44 @@ export class DatabaseBackup {
/**
* Clean up old backups, keeping only the specified number
*/
async cleanupOldBackups(keepCount: number = 5, backupDir?: string): Promise<void> {
async cleanupOldBackups(
keepCount: number = 5,
backupDir?: string
): Promise<void> {
const dir = backupDir || this.defaultOptions.outputDir;
const backups = await this.listBackups(dir);
if (backups.length <= keepCount) {
migrationLogger.info("BACKUP_CLEANUP", `No cleanup needed. Found ${backups.length} backups, keeping ${keepCount}`);
migrationLogger.info(
"BACKUP_CLEANUP",
`No cleanup needed. Found ${backups.length} backups, keeping ${keepCount}`
);
return;
}
const toDelete = backups.slice(keepCount);
migrationLogger.info("BACKUP_CLEANUP", `Cleaning up ${toDelete.length} old backups`);
migrationLogger.info(
"BACKUP_CLEANUP",
`Cleaning up ${toDelete.length} old backups`
);
const fs = await import("node:fs/promises");
for (const backup of toDelete) {
try {
await fs.unlink(backup.path);
migrationLogger.debug("BACKUP_CLEANUP", `Deleted old backup: ${backup.filename}`);
migrationLogger.debug(
"BACKUP_CLEANUP",
`Deleted old backup: ${backup.filename}`
);
} catch (error) {
migrationLogger.warn("BACKUP_CLEANUP", `Failed to delete backup: ${backup.filename}`, {
error: (error as Error).message
});
migrationLogger.warn(
"BACKUP_CLEANUP",
`Failed to delete backup: ${backup.filename}`,
{
error: (error as Error).message,
}
);
}
}
}
@@ -372,13 +409,15 @@ if (import.meta.url === `file://${process.argv[1]}`) {
case "list":
const backups = await backup.listBackups();
console.log('\n=== DATABASE BACKUPS ===');
console.log("\n=== DATABASE BACKUPS ===");
if (backups.length === 0) {
console.log('No backups found.');
console.log("No backups found.");
} else {
backups.forEach(b => {
const sizeMB = Math.round(b.size / 1024 / 1024 * 100) / 100;
console.log(`${b.filename} (${b.type}, ${sizeMB}MB, ${b.created.toISOString()})`);
backups.forEach((b) => {
const sizeMB = Math.round((b.size / 1024 / 1024) * 100) / 100;
console.log(
`${b.filename} (${b.type}, ${sizeMB}MB, ${b.created.toISOString()})`
);
});
}
return { success: true, backupPath: "", size: 0, duration: 0 };
@@ -410,11 +449,13 @@ Examples:
runCommand()
.then((result) => {
if (command !== "list" && command !== "cleanup") {
console.log('\n=== BACKUP RESULTS ===');
console.log(`Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== BACKUP RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
if (result.success) {
console.log(`Path: ${result.backupPath}`);
console.log(`Size: ${Math.round(result.size / 1024 / 1024 * 100) / 100} MB`);
console.log(
`Size: ${Math.round((result.size / 1024 / 1024) * 100) / 100} MB`
);
console.log(`Duration: ${result.duration}ms`);
if (result.checksumMD5) {
console.log(`Checksum: ${result.checksumMD5}`);
@@ -427,7 +468,7 @@ Examples:
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('Backup failed:', error);
console.error("Backup failed:", error);
process.exit(1);
});
}
}

View File

@@ -10,7 +10,11 @@ import { migrationLogger } from "./migration-logger";
interface BatchTest {
name: string;
testFn: () => Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }>;
testFn: () => Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}>;
critical: boolean;
timeout: number;
}
@@ -47,7 +51,10 @@ export class BatchProcessingTester {
const tests: BatchTestResult[] = [];
try {
migrationLogger.startStep("BATCH_TESTS", "Running batch processing system validation tests");
migrationLogger.startStep(
"BATCH_TESTS",
"Running batch processing system validation tests"
);
// Define test suite
const batchTests: BatchTest[] = [
@@ -120,9 +127,12 @@ export class BatchProcessingTester {
}
const totalDuration = Date.now() - startTime;
const passedTests = tests.filter(t => t.success).length;
const failedTests = tests.filter(t => !t.success).length;
const criticalFailures = tests.filter(t => !t.success && batchTests.find(bt => bt.name === t.name)?.critical).length;
const passedTests = tests.filter((t) => t.success).length;
const failedTests = tests.filter((t) => !t.success).length;
const criticalFailures = tests.filter(
(t) =>
!t.success && batchTests.find((bt) => bt.name === t.name)?.critical
).length;
const result: BatchSystemTestResult = {
success: criticalFailures === 0,
@@ -136,13 +146,19 @@ export class BatchProcessingTester {
if (result.success) {
migrationLogger.completeStep("BATCH_TESTS");
} else {
migrationLogger.failStep("BATCH_TESTS", new Error(`${criticalFailures} critical batch tests failed`));
migrationLogger.failStep(
"BATCH_TESTS",
new Error(`${criticalFailures} critical batch tests failed`)
);
}
return result;
} catch (error) {
migrationLogger.error("BATCH_TESTS", "Batch processing test suite failed", error as Error);
migrationLogger.error(
"BATCH_TESTS",
"Batch processing test suite failed",
error as Error
);
throw error;
} finally {
await this.prisma.$disconnect();
@@ -160,10 +176,7 @@ export class BatchProcessingTester {
setTimeout(() => reject(new Error("Test timeout")), test.timeout);
});
const testResult = await Promise.race([
test.testFn(),
timeoutPromise
]);
const testResult = await Promise.race([test.testFn(), timeoutPromise]);
const duration = Date.now() - startTime;
@@ -178,21 +191,25 @@ export class BatchProcessingTester {
if (testResult.success) {
migrationLogger.debug("BATCH_TEST", `${test.name} passed`, {
duration,
details: testResult.details
details: testResult.details,
});
} else {
migrationLogger.warn("BATCH_TEST", `${test.name} failed`, {
duration,
error: testResult.error?.message
error: testResult.error?.message,
});
}
return result;
} catch (error) {
const duration = Date.now() - startTime;
migrationLogger.error("BATCH_TEST", `💥 ${test.name} crashed`, error as Error, { duration });
migrationLogger.error(
"BATCH_TEST",
`💥 ${test.name} crashed`,
error as Error,
{ duration }
);
return {
name: test.name,
@@ -203,81 +220,107 @@ export class BatchProcessingTester {
}
}
private async testDatabaseSchema(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testDatabaseSchema(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Check if AIBatchRequest table exists and has correct columns
const batchRequestTableCheck = await this.prisma.$queryRaw<{count: string}[]>`
const batchRequestTableCheck = await this.prisma.$queryRaw<
{ count: string }[]
>`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_name = 'AIBatchRequest'
`;
if (parseInt(batchRequestTableCheck[0]?.count || '0') === 0) {
if (parseInt(batchRequestTableCheck[0]?.count || "0") === 0) {
return {
success: false,
error: new Error("AIBatchRequest table not found")
error: new Error("AIBatchRequest table not found"),
};
}
// Check required columns
const requiredColumns = [
'openaiBatchId', 'inputFileId', 'outputFileId', 'status', 'companyId'
"openaiBatchId",
"inputFileId",
"outputFileId",
"status",
"companyId",
];
const columnChecks = await Promise.all(
requiredColumns.map(async (column) => {
const result = await this.prisma.$queryRawUnsafe(`
const result = (await this.prisma.$queryRawUnsafe(`
SELECT COUNT(*) as count
FROM information_schema.columns
WHERE table_name = 'AIBatchRequest' AND column_name = '${column}'
`) as {count: string}[];
return { column, exists: parseInt(result[0]?.count || '0') > 0 };
`)) as { count: string }[];
return { column, exists: parseInt(result[0]?.count || "0") > 0 };
})
);
const missingColumns = columnChecks.filter(c => !c.exists).map(c => c.column);
const missingColumns = columnChecks
.filter((c) => !c.exists)
.map((c) => c.column);
// Check AIProcessingRequest has batch fields
const processingRequestBatchFields = await this.prisma.$queryRawUnsafe(`
const processingRequestBatchFields = (await this.prisma.$queryRawUnsafe(`
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'AIProcessingRequest'
AND column_name IN ('processingStatus', 'batchId')
`) as {column_name: string}[];
`)) as { column_name: string }[];
const hasProcessingStatus = processingRequestBatchFields.some(c => c.column_name === 'processingStatus');
const hasBatchId = processingRequestBatchFields.some(c => c.column_name === 'batchId');
const hasProcessingStatus = processingRequestBatchFields.some(
(c) => c.column_name === "processingStatus"
);
const hasBatchId = processingRequestBatchFields.some(
(c) => c.column_name === "batchId"
);
return {
success: missingColumns.length === 0 && hasProcessingStatus && hasBatchId,
success:
missingColumns.length === 0 && hasProcessingStatus && hasBatchId,
details: {
missingColumns,
hasProcessingStatus,
hasBatchId,
requiredColumnsPresent: requiredColumns.length - missingColumns.length
requiredColumnsPresent:
requiredColumns.length - missingColumns.length,
},
error: missingColumns.length > 0 || !hasProcessingStatus || !hasBatchId
? new Error(`Schema validation failed: missing ${missingColumns.join(', ')}${!hasProcessingStatus ? ', processingStatus' : ''}${!hasBatchId ? ', batchId' : ''}`)
: undefined
error:
missingColumns.length > 0 || !hasProcessingStatus || !hasBatchId
? new Error(
`Schema validation failed: missing ${missingColumns.join(", ")}${!hasProcessingStatus ? ", processingStatus" : ""}${!hasBatchId ? ", batchId" : ""}`
)
: undefined,
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
private async testBatchProcessorImport(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testBatchProcessorImport(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test if batch processor can be imported
const batchProcessor = await import("../../lib/batchProcessor");
// Check if key functions/classes exist
const hasBatchConfig = 'BATCH_CONFIG' in batchProcessor;
const hasCreateBatch = typeof batchProcessor.createBatchFromRequests === 'function';
const hasProcessBatch = typeof batchProcessor.processBatchResults === 'function';
const hasBatchConfig = "BATCH_CONFIG" in batchProcessor;
const hasCreateBatch =
typeof batchProcessor.createBatchFromRequests === "function";
const hasProcessBatch =
typeof batchProcessor.processBatchResults === "function";
return {
success: hasBatchConfig || hasCreateBatch || hasProcessBatch, // At least one should exist
@@ -286,79 +329,85 @@ export class BatchProcessingTester {
hasBatchConfig,
hasCreateBatch,
hasProcessBatch,
exportedItems: Object.keys(batchProcessor)
}
exportedItems: Object.keys(batchProcessor),
},
};
} catch (error) {
return {
success: false,
error: error as Error,
details: {
batchProcessorImported: false,
importError: (error as Error).message
}
importError: (error as Error).message,
},
};
}
}
private async testBatchRequestCreation(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testBatchRequestCreation(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Create a test batch request
const testBatchRequest = await this.prisma.aIBatchRequest.create({
data: {
companyId: 'test-company-' + Date.now(),
openaiBatchId: 'test-batch-' + Date.now(),
inputFileId: 'test-input-' + Date.now(),
status: 'PENDING',
}
companyId: "test-company-" + Date.now(),
openaiBatchId: "test-batch-" + Date.now(),
inputFileId: "test-input-" + Date.now(),
status: "PENDING",
},
});
// Verify it was created correctly
const retrievedBatch = await this.prisma.aIBatchRequest.findUnique({
where: { id: testBatchRequest.id }
where: { id: testBatchRequest.id },
});
// Clean up test data
await this.prisma.aIBatchRequest.delete({
where: { id: testBatchRequest.id }
where: { id: testBatchRequest.id },
});
return {
success: !!retrievedBatch && retrievedBatch.status === 'PENDING',
success: !!retrievedBatch && retrievedBatch.status === "PENDING",
details: {
batchRequestCreated: !!testBatchRequest,
batchRequestRetrieved: !!retrievedBatch,
statusCorrect: retrievedBatch?.status === 'PENDING',
testBatchId: testBatchRequest.id
}
statusCorrect: retrievedBatch?.status === "PENDING",
testBatchId: testBatchRequest.id,
},
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
private async testProcessingRequestManagement(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testProcessingRequestManagement(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Count existing processing requests
const initialCount = await this.prisma.aIProcessingRequest.count();
// Check processing status distribution
const statusDistribution = await this.prisma.aIProcessingRequest.groupBy({
by: ['processingStatus'],
by: ["processingStatus"],
_count: { processingStatus: true },
});
// Check if we can query requests ready for batching
const readyForBatching = await this.prisma.aIProcessingRequest.findMany({
where: {
processingStatus: 'PENDING_BATCHING'
processingStatus: "PENDING_BATCHING",
},
take: 5
take: 5,
});
return {
@@ -366,40 +415,46 @@ export class BatchProcessingTester {
details: {
totalProcessingRequests: initialCount,
statusDistribution: Object.fromEntries(
statusDistribution.map(s => [s.processingStatus, s._count.processingStatus])
statusDistribution.map((s) => [
s.processingStatus,
s._count.processingStatus,
])
),
readyForBatchingCount: readyForBatching.length,
canQueryByStatus: true
}
canQueryByStatus: true,
},
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
private async testBatchStatusTransitions(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testBatchStatusTransitions(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test that we can update batch status through all states
const testBatchRequest = await this.prisma.aIBatchRequest.create({
data: {
companyId: 'test-company-' + Date.now(),
openaiBatchId: 'test-status-batch-' + Date.now(),
inputFileId: 'test-status-input-' + Date.now(),
status: 'PENDING',
}
companyId: "test-company-" + Date.now(),
openaiBatchId: "test-status-batch-" + Date.now(),
inputFileId: "test-status-input-" + Date.now(),
status: "PENDING",
},
});
const statusTransitions = [
'UPLOADING',
'VALIDATING',
'IN_PROGRESS',
'FINALIZING',
'COMPLETED',
'PROCESSED'
"UPLOADING",
"VALIDATING",
"IN_PROGRESS",
"FINALIZING",
"COMPLETED",
"PROCESSED",
] as const;
const transitionResults: boolean[] = [];
@@ -408,7 +463,7 @@ export class BatchProcessingTester {
try {
await this.prisma.aIBatchRequest.update({
where: { id: testBatchRequest.id },
data: { status }
data: { status },
});
transitionResults.push(true);
} catch (error) {
@@ -418,10 +473,10 @@ export class BatchProcessingTester {
// Clean up test data
await this.prisma.aIBatchRequest.delete({
where: { id: testBatchRequest.id }
where: { id: testBatchRequest.id },
});
const successfulTransitions = transitionResults.filter(r => r).length;
const successfulTransitions = transitionResults.filter((r) => r).length;
return {
success: successfulTransitions === statusTransitions.length,
@@ -430,30 +485,38 @@ export class BatchProcessingTester {
successfulTransitions,
failedTransitions: statusTransitions.length - successfulTransitions,
transitionResults: Object.fromEntries(
statusTransitions.map((status, index) => [status, transitionResults[index]])
)
}
statusTransitions.map((status, index) => [
status,
transitionResults[index],
])
),
},
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
private async testBatchScheduling(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testBatchScheduling(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test if batch scheduler can be imported
const batchScheduler = await import("../../lib/batchScheduler");
// Check if scheduling functions exist
const hasScheduler = typeof batchScheduler.startBatchScheduler === 'function';
const hasProcessor = typeof batchScheduler.processPendingBatches === 'function';
const hasScheduler =
typeof batchScheduler.startBatchScheduler === "function";
const hasProcessor =
typeof batchScheduler.processPendingBatches === "function";
// Check environment variables for scheduling
const batchEnabled = process.env.BATCH_PROCESSING_ENABLED === 'true';
const batchEnabled = process.env.BATCH_PROCESSING_ENABLED === "true";
const hasIntervals = !!(
process.env.BATCH_CREATE_INTERVAL &&
process.env.BATCH_STATUS_CHECK_INTERVAL &&
@@ -468,35 +531,38 @@ export class BatchProcessingTester {
hasProcessor,
batchEnabled,
hasIntervals,
exportedItems: Object.keys(batchScheduler)
}
exportedItems: Object.keys(batchScheduler),
},
};
} catch (error) {
return {
success: false,
error: error as Error,
details: {
batchSchedulerImported: false,
importError: (error as Error).message
}
importError: (error as Error).message,
},
};
}
}
private async testOpenAIIntegration(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testOpenAIIntegration(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const apiKey = process.env.OPENAI_API_KEY;
const mockMode = process.env.OPENAI_MOCK_MODE === 'true';
const mockMode = process.env.OPENAI_MOCK_MODE === "true";
if (mockMode) {
return {
success: true,
details: {
mode: 'mock',
mode: "mock",
apiKeyPresent: !!apiKey,
testType: 'mock_mode_enabled'
}
testType: "mock_mode_enabled",
},
};
}
@@ -505,70 +571,77 @@ export class BatchProcessingTester {
success: false,
error: new Error("OpenAI API key not configured"),
details: {
mode: 'live',
apiKeyPresent: false
}
mode: "live",
apiKeyPresent: false,
},
};
}
// Test basic API access (simple models list)
const response = await fetch("https://api.openai.com/v1/models", {
headers: {
"Authorization": `Bearer ${apiKey}`,
Authorization: `Bearer ${apiKey}`,
},
});
if (!response.ok) {
return {
success: false,
error: new Error(`OpenAI API access failed: ${response.status} ${response.statusText}`),
error: new Error(
`OpenAI API access failed: ${response.status} ${response.statusText}`
),
details: {
mode: 'live',
mode: "live",
apiKeyPresent: true,
httpStatus: response.status
}
httpStatus: response.status,
},
};
}
const models = await response.json();
const hasModels = models.data && Array.isArray(models.data) && models.data.length > 0;
const hasModels =
models.data && Array.isArray(models.data) && models.data.length > 0;
return {
success: hasModels,
details: {
mode: 'live',
mode: "live",
apiKeyPresent: true,
apiAccessible: true,
modelsCount: models.data?.length || 0,
hasGPTModels: models.data?.some((m: any) => m.id.includes('gpt')) || false
}
hasGPTModels:
models.data?.some((m: any) => m.id.includes("gpt")) || false,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
details: {
mode: 'live',
mode: "live",
apiKeyPresent: !!process.env.OPENAI_API_KEY,
networkError: true
}
networkError: true,
},
};
}
}
private async testErrorHandling(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testErrorHandling(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test handling of invalid batch requests
let invalidBatchHandled = false;
try {
await this.prisma.aIBatchRequest.create({
data: {
companyId: '', // Invalid empty company ID
openaiBatchId: 'test-invalid-batch',
inputFileId: 'test-invalid-input',
status: 'PENDING',
}
companyId: "", // Invalid empty company ID
openaiBatchId: "test-invalid-batch",
inputFileId: "test-invalid-input",
status: "PENDING",
},
});
} catch (error) {
// This should fail, which means error handling is working
@@ -577,28 +650,28 @@ export class BatchProcessingTester {
// Test handling of duplicate OpenAI batch IDs
let duplicateHandled = false;
const uniqueId = 'test-duplicate-' + Date.now();
const uniqueId = "test-duplicate-" + Date.now();
try {
// Create first batch
const firstBatch = await this.prisma.aIBatchRequest.create({
data: {
companyId: 'test-company-duplicate',
companyId: "test-company-duplicate",
openaiBatchId: uniqueId,
inputFileId: 'test-duplicate-input-1',
status: 'PENDING',
}
inputFileId: "test-duplicate-input-1",
status: "PENDING",
},
});
// Try to create duplicate
try {
await this.prisma.aIBatchRequest.create({
data: {
companyId: 'test-company-duplicate',
companyId: "test-company-duplicate",
openaiBatchId: uniqueId, // Same OpenAI batch ID
inputFileId: 'test-duplicate-input-2',
status: 'PENDING',
}
inputFileId: "test-duplicate-input-2",
status: "PENDING",
},
});
} catch (error) {
// This should fail due to unique constraint
@@ -607,9 +680,8 @@ export class BatchProcessingTester {
// Clean up
await this.prisma.aIBatchRequest.delete({
where: { id: firstBatch.id }
where: { id: firstBatch.id },
});
} catch (error) {
// Initial creation failed, that's also error handling
duplicateHandled = true;
@@ -620,19 +692,22 @@ export class BatchProcessingTester {
details: {
invalidBatchHandled,
duplicateHandled,
errorHandlingWorking: invalidBatchHandled && duplicateHandled
}
errorHandlingWorking: invalidBatchHandled && duplicateHandled,
},
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
private async testBatchPerformance(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testBatchPerformance(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test query performance for batch operations
const startTime = Date.now();
@@ -640,9 +715,9 @@ export class BatchProcessingTester {
// Query for batches ready for processing
const pendingBatches = await this.prisma.aIBatchRequest.findMany({
where: {
status: { in: ['PENDING', 'UPLOADING', 'VALIDATING'] }
status: { in: ["PENDING", "UPLOADING", "VALIDATING"] },
},
take: 100
take: 100,
});
const pendingBatchesTime = Date.now() - startTime;
@@ -652,15 +727,16 @@ export class BatchProcessingTester {
const readyRequests = await this.prisma.aIProcessingRequest.findMany({
where: {
processingStatus: 'PENDING_BATCHING'
processingStatus: "PENDING_BATCHING",
},
take: 100
take: 100,
});
const readyRequestsTime = Date.now() - batchingStartTime;
// Query performance should be reasonable
const performanceAcceptable = pendingBatchesTime < 1000 && readyRequestsTime < 1000;
const performanceAcceptable =
pendingBatchesTime < 1000 && readyRequestsTime < 1000;
return {
success: performanceAcceptable,
@@ -670,22 +746,25 @@ export class BatchProcessingTester {
readyRequestsCount: readyRequests.length,
readyRequestsQueryTime: readyRequestsTime,
performanceAcceptable,
totalTestTime: Date.now() - startTime
}
totalTestTime: Date.now() - startTime,
},
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
private async testDataConsistency(): Promise<{ success: boolean; details?: Record<string, unknown>; error?: Error }> {
private async testDataConsistency(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Check for orphaned processing requests (batchId points to non-existent batch)
const orphanedRequests = await this.prisma.$queryRaw<{count: bigint}[]>`
const orphanedRequests = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count
FROM "AIProcessingRequest" apr
LEFT JOIN "AIBatchRequest" abr ON apr."batchId" = abr.id
@@ -695,7 +774,9 @@ export class BatchProcessingTester {
const orphanedCount = Number(orphanedRequests[0]?.count || 0);
// Check for processing requests with inconsistent status
const inconsistentRequests = await this.prisma.$queryRaw<{count: bigint}[]>`
const inconsistentRequests = await this.prisma.$queryRaw<
{ count: bigint }[]
>`
SELECT COUNT(*) as count
FROM "AIProcessingRequest"
WHERE ("batchId" IS NOT NULL AND "processingStatus" = 'PENDING_BATCHING')
@@ -705,7 +786,7 @@ export class BatchProcessingTester {
const inconsistentCount = Number(inconsistentRequests[0]?.count || 0);
// Check for batches with no associated requests
const emptyBatches = await this.prisma.$queryRaw<{count: bigint}[]>`
const emptyBatches = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count
FROM "AIBatchRequest" abr
LEFT JOIN "AIProcessingRequest" apr ON abr.id = apr."batchId"
@@ -723,15 +804,18 @@ export class BatchProcessingTester {
inconsistentRequests: inconsistentCount,
emptyBatches: emptyBatchCount,
dataConsistent,
issuesFound: orphanedCount + inconsistentCount
issuesFound: orphanedCount + inconsistentCount,
},
error: !dataConsistent ? new Error(`Data consistency issues found: ${orphanedCount} orphaned requests, ${inconsistentCount} inconsistent requests`) : undefined
error: !dataConsistent
? new Error(
`Data consistency issues found: ${orphanedCount} orphaned requests, ${inconsistentCount} inconsistent requests`
)
: undefined,
};
} catch (error) {
return {
success: false,
error: error as Error
error: error as Error,
};
}
}
@@ -743,7 +827,7 @@ export class BatchProcessingTester {
const report = `
# Batch Processing System Test Report
**Overall Status**: ${result.success ? '✅ All Critical Tests Passed' : '❌ Critical Tests Failed'}
**Overall Status**: ${result.success ? "✅ All Critical Tests Passed" : "❌ Critical Tests Failed"}
**Total Duration**: ${result.totalDuration}ms
**Passed Tests**: ${result.passedTests}/${result.tests.length}
**Failed Tests**: ${result.failedTests}/${result.tests.length}
@@ -751,19 +835,24 @@ export class BatchProcessingTester {
## Test Results
${result.tests.map(test => `
${result.tests
.map(
(test) => `
### ${test.name}
- **Status**: ${test.success ? '✅ Pass' : '❌ Fail'}
- **Status**: ${test.success ? "✅ Pass" : "❌ Fail"}
- **Duration**: ${test.duration}ms
${test.details ? `- **Details**: \`\`\`json\n${JSON.stringify(test.details, null, 2)}\n\`\`\`` : ''}
${test.error ? `- **Error**: ${test.error.message}` : ''}
`).join('')}
${test.details ? `- **Details**: \`\`\`json\n${JSON.stringify(test.details, null, 2)}\n\`\`\`` : ""}
${test.error ? `- **Error**: ${test.error.message}` : ""}
`
)
.join("")}
## Summary
${result.success ?
'🎉 Batch processing system is working correctly!' :
`⚠️ ${result.criticalFailures} critical issue(s) found. Please review and fix the issues above.`
${
result.success
? "🎉 Batch processing system is working correctly!"
: `⚠️ ${result.criticalFailures} critical issue(s) found. Please review and fix the issues above.`
}
## Architecture Overview
@@ -776,14 +865,21 @@ The batch processing system provides:
- **Status monitoring** with 2-minute check intervals
- **Result processing** with 1-minute intervals
${result.failedTests > 0 ? `
${
result.failedTests > 0
? `
## Issues Found
${result.tests.filter(t => !t.success).map(test => `
${result.tests
.filter((t) => !t.success)
.map(
(test) => `
### ${test.name}
- **Error**: ${test.error?.message || 'Test failed'}
- **Details**: ${test.details ? JSON.stringify(test.details, null, 2) : 'No additional details'}
`).join('')}
- **Error**: ${test.error?.message || "Test failed"}
- **Details**: ${test.details ? JSON.stringify(test.details, null, 2) : "No additional details"}
`
)
.join("")}
## Recommended Actions
@@ -792,23 +888,27 @@ ${result.tests.filter(t => !t.success).map(test => `
3. **API Issues**: Check OpenAI API key configuration and network connectivity
4. **Performance Issues**: Optimize database queries and add missing indexes
5. **Data Issues**: Run data consistency checks and fix orphaned records
` : `
`
: `
## System Health
✅ All critical batch processing components are functioning correctly.
### Performance Metrics
${result.tests.find(t => t.name === "Batch Processing Performance")?.details ?
`- Pending batches query: ${(result.tests.find(t => t.name === "Batch Processing Performance")?.details as any)?.pendingBatchesQueryTime}ms
- Ready requests query: ${(result.tests.find(t => t.name === "Batch Processing Performance")?.details as any)?.readyRequestsQueryTime}ms`
: 'Performance metrics not available'}
${
result.tests.find((t) => t.name === "Batch Processing Performance")?.details
? `- Pending batches query: ${(result.tests.find((t) => t.name === "Batch Processing Performance")?.details as any)?.pendingBatchesQueryTime}ms
- Ready requests query: ${(result.tests.find((t) => t.name === "Batch Processing Performance")?.details as any)?.readyRequestsQueryTime}ms`
: "Performance metrics not available"
}
### Next Steps
1. Monitor batch processing queues regularly
2. Set up alerting for failed batches
3. Optimize batch sizes based on usage patterns
4. Consider implementing batch priority levels
`}
`
}
---
*Generated at ${new Date().toISOString()}*
@@ -824,18 +924,19 @@ if (import.meta.url === `file://${process.argv[1]}`) {
const generateReport = process.argv.includes("--report");
tester.runBatchProcessingTests()
tester
.runBatchProcessingTests()
.then((result) => {
console.log('\n=== BATCH PROCESSING TEST RESULTS ===');
console.log(`Overall Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== BATCH PROCESSING TEST RESULTS ===");
console.log(`Overall Success: ${result.success ? "✅" : "❌"}`);
console.log(`Total Duration: ${result.totalDuration}ms`);
console.log(`Passed Tests: ${result.passedTests}/${result.tests.length}`);
console.log(`Failed Tests: ${result.failedTests}/${result.tests.length}`);
console.log(`Critical Failures: ${result.criticalFailures}`);
console.log('\n=== INDIVIDUAL TEST RESULTS ===');
console.log("\n=== INDIVIDUAL TEST RESULTS ===");
for (const test of result.tests) {
const status = test.success ? '✅' : '❌';
const status = test.success ? "✅" : "❌";
console.log(`${status} ${test.name} (${test.duration}ms)`);
if (test.error) {
@@ -858,7 +959,7 @@ if (import.meta.url === `file://${process.argv[1]}`) {
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('Batch processing tests failed:', error);
console.error("Batch processing tests failed:", error);
process.exit(1);
});
}
}

View File

@@ -71,7 +71,10 @@ export class DeploymentOrchestrator {
this.startTime = Date.now();
try {
migrationLogger.startPhase("DEPLOYMENT", `Starting deployment with options: ${JSON.stringify(this.options)}`);
migrationLogger.startPhase(
"DEPLOYMENT",
`Starting deployment with options: ${JSON.stringify(this.options)}`
);
// Pre-deployment phase
if (!this.options.skipPreChecks) {
@@ -97,7 +100,7 @@ export class DeploymentOrchestrator {
migrationLogger.info("DEPLOYMENT", "Deployment completed successfully", {
totalDuration,
downtime,
phases: this.executedPhases.length
phases: this.executedPhases.length,
});
return {
@@ -107,10 +110,10 @@ export class DeploymentOrchestrator {
downtime,
backupPath,
};
} catch (error) {
const totalDuration = Date.now() - this.startTime;
const downtime = this.downtimeEnd > 0 ? this.downtimeEnd - this.downtimeStart : 0;
const downtime =
this.downtimeEnd > 0 ? this.downtimeEnd - this.downtimeStart : 0;
migrationLogger.error("DEPLOYMENT", "Deployment failed", error as Error);
@@ -119,7 +122,11 @@ export class DeploymentOrchestrator {
try {
await this.performRollback();
} catch (rollbackError) {
migrationLogger.error("ROLLBACK", "Rollback failed", rollbackError as Error);
migrationLogger.error(
"ROLLBACK",
"Rollback failed",
rollbackError as Error
);
}
}
@@ -149,7 +156,9 @@ export class DeploymentOrchestrator {
const result = await envMigration.migrateEnvironment();
if (!result.success) {
throw new Error(`Environment migration failed: ${result.errors.join(', ')}`);
throw new Error(
`Environment migration failed: ${result.errors.join(", ")}`
);
}
},
},
@@ -191,7 +200,9 @@ export class DeploymentOrchestrator {
const downtime = this.downtimeEnd - this.downtimeStart;
if (downtime > this.options.maxDowntime) {
throw new Error(`Downtime exceeded maximum allowed: ${downtime}ms > ${this.options.maxDowntime}ms`);
throw new Error(
`Downtime exceeded maximum allowed: ${downtime}ms > ${this.options.maxDowntime}ms`
);
}
},
},
@@ -243,17 +254,25 @@ export class DeploymentOrchestrator {
}
private async runPreDeploymentChecks(): Promise<void> {
migrationLogger.startStep("PRE_CHECKS", "Running pre-deployment validation");
migrationLogger.startStep(
"PRE_CHECKS",
"Running pre-deployment validation"
);
const checker = new PreDeploymentChecker();
const result = await checker.runAllChecks();
if (!result.success) {
throw new Error(`Pre-deployment checks failed with ${result.criticalFailures} critical failures`);
throw new Error(
`Pre-deployment checks failed with ${result.criticalFailures} critical failures`
);
}
if (result.warningCount > 0) {
migrationLogger.warn("PRE_CHECKS", `Proceeding with ${result.warningCount} warnings`);
migrationLogger.warn(
"PRE_CHECKS",
`Proceeding with ${result.warningCount} warnings`
);
}
migrationLogger.completeStep("PRE_CHECKS");
@@ -280,11 +299,14 @@ export class DeploymentOrchestrator {
private async executePhase(phase: DeploymentPhase): Promise<void> {
try {
migrationLogger.startStep(phase.name.replace(/\s+/g, '_').toUpperCase(), phase.description);
migrationLogger.startStep(
phase.name.replace(/\s+/g, "_").toUpperCase(),
phase.description
);
if (this.options.dryRun) {
migrationLogger.info("DRY_RUN", `Would execute: ${phase.name}`);
await new Promise(resolve => setTimeout(resolve, 100)); // Simulate execution time
await new Promise((resolve) => setTimeout(resolve, 100)); // Simulate execution time
} else {
await phase.execute();
}
@@ -297,15 +319,23 @@ export class DeploymentOrchestrator {
}
}
migrationLogger.completeStep(phase.name.replace(/\s+/g, '_').toUpperCase());
migrationLogger.completeStep(
phase.name.replace(/\s+/g, "_").toUpperCase()
);
} catch (error) {
migrationLogger.failStep(phase.name.replace(/\s+/g, '_').toUpperCase(), error as Error);
migrationLogger.failStep(
phase.name.replace(/\s+/g, "_").toUpperCase(),
error as Error
);
if (phase.critical) {
throw error;
} else {
migrationLogger.warn("PHASE", `Non-critical phase failed: ${phase.name}`, { error: (error as Error).message });
migrationLogger.warn(
"PHASE",
`Non-critical phase failed: ${phase.name}`,
{ error: (error as Error).message }
);
}
}
}
@@ -322,8 +352,10 @@ export class DeploymentOrchestrator {
encoding: "utf8",
});
migrationLogger.info("DB_MIGRATION", "Database migrations completed successfully");
migrationLogger.info(
"DB_MIGRATION",
"Database migrations completed successfully"
);
} catch (error) {
throw new Error(`Database migration failed: ${(error as Error).message}`);
}
@@ -335,8 +367,10 @@ export class DeploymentOrchestrator {
try {
// This would typically involve running specific rollback migrations
// For now, we'll log the intent
migrationLogger.warn("DB_ROLLBACK", "Database rollback would be performed here");
migrationLogger.warn(
"DB_ROLLBACK",
"Database rollback would be performed here"
);
} catch (error) {
throw new Error(`Database rollback failed: ${(error as Error).message}`);
}
@@ -354,8 +388,10 @@ export class DeploymentOrchestrator {
encoding: "utf8",
});
migrationLogger.info("CODE_DEPLOY", "Application build completed successfully");
migrationLogger.info(
"CODE_DEPLOY",
"Application build completed successfully"
);
} catch (error) {
throw new Error(`Code deployment failed: ${(error as Error).message}`);
}
@@ -366,7 +402,7 @@ export class DeploymentOrchestrator {
// In a real deployment, this would restart the actual services
// For development, we'll simulate the restart
await new Promise(resolve => setTimeout(resolve, 1000));
await new Promise((resolve) => setTimeout(resolve, 1000));
migrationLogger.info("SERVICE_RESTART", "Services restarted successfully");
}
@@ -389,20 +425,29 @@ export class DeploymentOrchestrator {
const response = await fetch(`${baseUrl}/api/trpc/auth.getSession`);
return response.status === 200 || response.status === 401; // 401 is OK for auth endpoint
} catch (error) {
migrationLogger.error("TRPC_TEST", "tRPC endpoint test failed", error as Error);
migrationLogger.error(
"TRPC_TEST",
"tRPC endpoint test failed",
error as Error
);
return false;
}
}
private async activateBatchProcessing(): Promise<void> {
migrationLogger.info("BATCH_ACTIVATION", "Activating batch processing system");
migrationLogger.info(
"BATCH_ACTIVATION",
"Activating batch processing system"
);
// Set environment variable to enable batch processing
process.env.BATCH_PROCESSING_ENABLED = "true";
migrationLogger.info("BATCH_ACTIVATION", "Batch processing system activated");
migrationLogger.info(
"BATCH_ACTIVATION",
"Batch processing system activated"
);
}
private async testBatchProcessing(): Promise<boolean> {
@@ -412,28 +457,42 @@ export class DeploymentOrchestrator {
// Test that batch processing components can be imported
const { BatchProcessor } = await import("../../lib/batchProcessor");
return BatchProcessor !== undefined;
} catch (error) {
migrationLogger.error("BATCH_TEST", "Batch processing test failed", error as Error);
migrationLogger.error(
"BATCH_TEST",
"Batch processing test failed",
error as Error
);
return false;
}
}
private async runPostDeploymentValidation(): Promise<void> {
migrationLogger.info("POST_VALIDATION", "Running post-deployment validation");
migrationLogger.info(
"POST_VALIDATION",
"Running post-deployment validation"
);
const healthChecker = new HealthChecker();
const result = await healthChecker.runHealthChecks();
if (!result.success) {
throw new Error(`Post-deployment validation failed: ${result.errors.join(', ')}`);
throw new Error(
`Post-deployment validation failed: ${result.errors.join(", ")}`
);
}
migrationLogger.info("POST_VALIDATION", "Post-deployment validation passed");
migrationLogger.info(
"POST_VALIDATION",
"Post-deployment validation passed"
);
}
private async performProgressiveRollout(): Promise<void> {
migrationLogger.info("PROGRESSIVE_ROLLOUT", "Starting progressive feature rollout");
migrationLogger.info(
"PROGRESSIVE_ROLLOUT",
"Starting progressive feature rollout"
);
// This would implement a gradual rollout strategy
// For now, we'll just enable all features
@@ -444,20 +503,26 @@ export class DeploymentOrchestrator {
];
for (const step of rolloutSteps) {
migrationLogger.info("PROGRESSIVE_ROLLOUT", `Enabling ${step.feature} at ${step.percentage}%`);
await new Promise(resolve => setTimeout(resolve, 1000));
migrationLogger.info(
"PROGRESSIVE_ROLLOUT",
`Enabling ${step.feature} at ${step.percentage}%`
);
await new Promise((resolve) => setTimeout(resolve, 1000));
}
migrationLogger.info("PROGRESSIVE_ROLLOUT", "Progressive rollout completed");
migrationLogger.info(
"PROGRESSIVE_ROLLOUT",
"Progressive rollout completed"
);
}
private async performRollback(): Promise<void> {
migrationLogger.warn("ROLLBACK", "Starting deployment rollback");
// Rollback executed phases in reverse order
const rollbackPhases = this.phases.filter(p =>
this.executedPhases.includes(p.name) && p.rollback
).reverse();
const rollbackPhases = this.phases
.filter((p) => this.executedPhases.includes(p.name) && p.rollback)
.reverse();
for (const phase of rollbackPhases) {
try {
@@ -466,9 +531,12 @@ export class DeploymentOrchestrator {
if (phase.rollback) {
await phase.rollback();
}
} catch (error) {
migrationLogger.error("ROLLBACK", `Rollback failed for ${phase.name}`, error as Error);
migrationLogger.error(
"ROLLBACK",
`Rollback failed for ${phase.name}`,
error as Error
);
}
}
@@ -483,7 +551,7 @@ if (import.meta.url === `file://${process.argv[1]}`) {
const options: Partial<DeploymentOptions> = {};
// Parse command line arguments
args.forEach(arg => {
args.forEach((arg) => {
switch (arg) {
case "--dry-run":
options.dryRun = true;
@@ -505,10 +573,11 @@ if (import.meta.url === `file://${process.argv[1]}`) {
const orchestrator = new DeploymentOrchestrator(options);
orchestrator.deploy()
orchestrator
.deploy()
.then((result) => {
console.log('\n=== DEPLOYMENT RESULTS ===');
console.log(`Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== DEPLOYMENT RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
console.log(`Total Duration: ${result.totalDuration}ms`);
console.log(`Downtime: ${result.downtime}ms`);
console.log(`Completed Phases: ${result.completedPhases.length}`);
@@ -525,27 +594,27 @@ if (import.meta.url === `file://${process.argv[1]}`) {
console.error(`Error: ${result.error.message}`);
}
console.log("\nCompleted Phases:");
result.completedPhases.forEach((phase) => console.log(`${phase}`));
if (result.success) {
console.log("\n🎉 DEPLOYMENT SUCCESSFUL!");
console.log("\nNext Steps:");
console.log("1. Monitor application logs for any issues");
console.log("2. Run post-deployment tests: pnpm migration:test");
console.log("3. Verify new features are working correctly");
} else {
console.log("\n💥 DEPLOYMENT FAILED!");
console.log("\nNext Steps:");
console.log("1. Check logs for error details");
console.log("2. Fix identified issues");
console.log("3. Re-run deployment");
}
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error("Deployment orchestration failed:", error);
process.exit(1);
});
}
}


@@ -39,7 +39,7 @@ export class EnvironmentMigration {
defaultValue: "http://localhost:3000/api/trpc",
required: false,
newInVersion: "2.0.0",
example: "https://yourdomain.com/api/trpc"
example: "https://yourdomain.com/api/trpc",
},
{
key: "TRPC_BATCH_TIMEOUT",
@@ -47,7 +47,7 @@ export class EnvironmentMigration {
defaultValue: "30000",
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$"
validationRegex: "^[0-9]+$",
},
{
key: "TRPC_MAX_BATCH_SIZE",
@@ -55,7 +55,7 @@ export class EnvironmentMigration {
defaultValue: "100",
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$"
validationRegex: "^[0-9]+$",
},
// Batch Processing Configuration
@@ -65,7 +65,7 @@ export class EnvironmentMigration {
defaultValue: "true",
required: false,
newInVersion: "2.0.0",
validationRegex: "^(true|false)$"
validationRegex: "^(true|false)$",
},
{
key: "BATCH_CREATE_INTERVAL",
@@ -73,7 +73,7 @@ export class EnvironmentMigration {
defaultValue: "*/5 * * * *",
required: false,
newInVersion: "2.0.0",
example: "*/5 * * * * (every 5 minutes)"
example: "*/5 * * * * (every 5 minutes)",
},
{
key: "BATCH_STATUS_CHECK_INTERVAL",
@@ -81,7 +81,7 @@ export class EnvironmentMigration {
defaultValue: "*/2 * * * *",
required: false,
newInVersion: "2.0.0",
example: "*/2 * * * * (every 2 minutes)"
example: "*/2 * * * * (every 2 minutes)",
},
{
key: "BATCH_RESULT_PROCESSING_INTERVAL",
@@ -89,7 +89,7 @@ export class EnvironmentMigration {
defaultValue: "*/1 * * * *",
required: false,
newInVersion: "2.0.0",
example: "*/1 * * * * (every minute)"
example: "*/1 * * * * (every minute)",
},
{
key: "BATCH_MAX_REQUESTS",
@@ -97,7 +97,7 @@ export class EnvironmentMigration {
defaultValue: "1000",
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$"
validationRegex: "^[0-9]+$",
},
{
key: "BATCH_TIMEOUT_HOURS",
@@ -105,7 +105,7 @@ export class EnvironmentMigration {
defaultValue: "24",
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$"
validationRegex: "^[0-9]+$",
},
// Migration Specific
@@ -115,7 +115,7 @@ export class EnvironmentMigration {
defaultValue: "development",
required: false,
newInVersion: "2.0.0",
validationRegex: "^(development|staging|production)$"
validationRegex: "^(development|staging|production)$",
},
{
key: "MIGRATION_BACKUP_ENABLED",
@@ -123,7 +123,7 @@ export class EnvironmentMigration {
defaultValue: "true",
required: false,
newInVersion: "2.0.0",
validationRegex: "^(true|false)$"
validationRegex: "^(true|false)$",
},
{
key: "MIGRATION_ROLLBACK_ENABLED",
@@ -131,7 +131,7 @@ export class EnvironmentMigration {
defaultValue: "true",
required: false,
newInVersion: "2.0.0",
validationRegex: "^(true|false)$"
validationRegex: "^(true|false)$",
},
// Enhanced Security
@@ -142,7 +142,7 @@ export class EnvironmentMigration {
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$",
example: "900000 (15 minutes)"
example: "900000 (15 minutes)",
},
{
key: "RATE_LIMIT_MAX_REQUESTS",
@@ -150,7 +150,7 @@ export class EnvironmentMigration {
defaultValue: "100",
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$"
validationRegex: "^[0-9]+$",
},
// Performance Monitoring
@@ -160,7 +160,7 @@ export class EnvironmentMigration {
defaultValue: "true",
required: false,
newInVersion: "2.0.0",
validationRegex: "^(true|false)$"
validationRegex: "^(true|false)$",
},
{
key: "METRICS_COLLECTION_INTERVAL",
@@ -168,8 +168,8 @@ export class EnvironmentMigration {
defaultValue: "60",
required: false,
newInVersion: "2.0.0",
validationRegex: "^[0-9]+$"
}
validationRegex: "^[0-9]+$",
},
];
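// Reminder on the cron-style defaults above: fields are minute, hour,
// day-of-month, month, day-of-week, so "*/5 * * * *" fires every 5 minutes
// and "*/1 * * * *" fires every minute.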
private readonly deprecatedVariables: string[] = [
@@ -188,11 +188,14 @@ export class EnvironmentMigration {
warnings: [],
added: [],
deprecated: [],
updated: [],
};
try {
migrationLogger.startStep("ENVIRONMENT_MIGRATION", "Migrating environment configuration");
migrationLogger.startStep(
"ENVIRONMENT_MIGRATION",
"Migrating environment configuration"
);
// Read current environment
const currentEnv = this.readCurrentEnvironment();
@@ -217,13 +220,21 @@ export class EnvironmentMigration {
if (result.success) {
migrationLogger.completeStep("ENVIRONMENT_MIGRATION");
} else {
migrationLogger.failStep("ENVIRONMENT_MIGRATION", new Error(`Migration failed with ${result.errors.length} errors`));
migrationLogger.failStep(
"ENVIRONMENT_MIGRATION",
new Error(`Migration failed with ${result.errors.length} errors`)
);
}
} catch (error) {
result.success = false;
result.errors.push(
`Environment migration failed: ${(error as Error).message}`
);
migrationLogger.error(
"ENVIRONMENT_MIGRATION",
"Critical migration error",
error as Error
);
}
return result;
@@ -234,16 +245,22 @@ export class EnvironmentMigration {
const env: Record<string, string> = {};
// Merge environment from multiple sources
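// Object.assign means later entries in envFiles win, so a file loaded last
// (e.g. .env.local, assuming it is last in the list) overrides earlier defaults.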
envFiles.forEach((filename) => {
const filepath = join(process.cwd(), filename);
if (existsSync(filepath)) {
try {
const content = readFileSync(filepath, "utf8");
const parsed = this.parseEnvFile(content);
Object.assign(env, parsed);
migrationLogger.debug("ENV_READER", `Loaded environment from ${filename}`, { variables: Object.keys(parsed).length });
migrationLogger.debug(
"ENV_READER",
`Loaded environment from ${filename}`,
{ variables: Object.keys(parsed).length }
);
} catch (error) {
migrationLogger.warn("ENV_READER", `Failed to read ${filename}`, { error: (error as Error).message });
migrationLogger.warn("ENV_READER", `Failed to read ${filename}`, {
error: (error as Error).message,
});
}
}
});
@@ -276,13 +293,16 @@ export class EnvironmentMigration {
currentEnv: Record<string, string>,
result: MigrationResult
): Promise<void> {
migrationLogger.info("ENV_VALIDATION", "Validating existing environment variables");
migrationLogger.info(
"ENV_VALIDATION",
"Validating existing environment variables"
);
// Check required existing variables
const requiredExisting = [
"DATABASE_URL",
"NEXTAUTH_SECRET",
"OPENAI_API_KEY"
"OPENAI_API_KEY",
];
for (const key of requiredExisting) {
@@ -310,7 +330,9 @@ export class EnvironmentMigration {
migrationLogger.info("ENV_ADDITION", "Adding new environment variables");
const newEnvContent: string[] = [];
newEnvContent.push("# New environment variables for tRPC and Batch Processing");
newEnvContent.push(
"# New environment variables for tRPC and Batch Processing"
);
newEnvContent.push("# Added during migration to version 2.0.0");
newEnvContent.push("");
@@ -318,12 +340,21 @@ export class EnvironmentMigration {
// Group variables by category
const categories = {
"tRPC Configuration": this.newEnvironmentVariables.filter(v => v.key.startsWith("TRPC_")),
"Batch Processing": this.newEnvironmentVariables.filter(v => v.key.startsWith("BATCH_")),
"Migration Settings": this.newEnvironmentVariables.filter(v => v.key.startsWith("MIGRATION_")),
"Security & Performance": this.newEnvironmentVariables.filter(v =>
v.key.startsWith("RATE_LIMIT_") || v.key.startsWith("PERFORMANCE_") || v.key.startsWith("METRICS_")
)
"tRPC Configuration": this.newEnvironmentVariables.filter((v) =>
v.key.startsWith("TRPC_")
),
"Batch Processing": this.newEnvironmentVariables.filter((v) =>
v.key.startsWith("BATCH_")
),
"Migration Settings": this.newEnvironmentVariables.filter((v) =>
v.key.startsWith("MIGRATION_")
),
"Security & Performance": this.newEnvironmentVariables.filter(
(v) =>
v.key.startsWith("RATE_LIMIT_") ||
v.key.startsWith("PERFORMANCE_") ||
v.key.startsWith("METRICS_")
),
};
for (const [category, variables] of Object.entries(categories)) {
@@ -355,9 +386,13 @@ export class EnvironmentMigration {
if (addedCount > 0) {
const templatePath = join(process.cwd(), ".env.migration.template");
writeFileSync(templatePath, newEnvContent.join("\n"));
migrationLogger.info("ENV_ADDITION", `Created environment template with ${addedCount} new variables`, {
templatePath
});
migrationLogger.info(
"ENV_ADDITION",
`Created environment template with ${addedCount} new variables`,
{
templatePath,
}
);
}
}
@@ -365,12 +400,17 @@ export class EnvironmentMigration {
currentEnv: Record<string, string>,
result: MigrationResult
): Promise<void> {
migrationLogger.info("ENV_DEPRECATION", "Checking for deprecated environment variables");
migrationLogger.info(
"ENV_DEPRECATION",
"Checking for deprecated environment variables"
);
for (const deprecatedKey of this.deprecatedVariables) {
if (currentEnv[deprecatedKey]) {
result.deprecated.push(deprecatedKey);
result.warnings.push(
`Deprecated environment variable found: ${deprecatedKey}`
);
}
}
}
@@ -393,48 +433,70 @@ This guide helps you migrate your environment configuration for the new tRPC and
### 1. Add New Environment Variables
${
result.added.length > 0
? `
The following new environment variables need to be added to your \`.env.local\` file:
${result.added
.map((key) => {
const config = this.newEnvironmentVariables.find((v) => v.key === key);
return `
#### ${key}
- **Description**: ${config?.description}
- **Default**: ${config?.defaultValue || "Not set"}
- **Required**: ${config?.required ? "Yes" : "No"}
${config?.example ? `- **Example**: ${config.example}` : ""}
`;
})
.join("")}
`
: "No new environment variables need to be added."
}
### 2. Update Existing Variables
${
result.updated.length > 0
? `
The following variables already exist but may need review:
${result.updated.map((key) => `- ${key}`).join("\n")}
`
: "No existing variables need updates."
}
### 3. Handle Deprecated Variables
${
result.deprecated.length > 0
? `
The following variables are deprecated and should be removed:
${result.deprecated.map((key) => `- ${key}`).join("\n")}
`
: "No deprecated variables found."
}
## Errors and Warnings
${
result.errors.length > 0
? `
### Errors (Must Fix)
${result.errors.map((error) => `- ${error}`).join("\n")}
`
: ""
}
${
result.warnings.length > 0
? `
### Warnings (Recommended Fixes)
${result.warnings.map((warning) => `- ${warning}`).join("\n")}
`
: ""
}
## Next Steps
@@ -469,7 +531,11 @@ pnpm migration:test-batch
const guidePath = join(process.cwd(), "ENVIRONMENT_MIGRATION_GUIDE.md");
writeFileSync(guidePath, guide);
migrationLogger.info("MIGRATION_GUIDE", "Created environment migration guide", { guidePath });
migrationLogger.info(
"MIGRATION_GUIDE",
"Created environment migration guide",
{ guidePath }
);
}
private async createExampleEnvironmentFile(): Promise<void> {
@@ -558,7 +624,9 @@ PORT="3000"
const examplePath = join(process.cwd(), ".env.example");
writeFileSync(examplePath, example);
migrationLogger.info("EXAMPLE_ENV", "Created example environment file", { examplePath });
migrationLogger.info("EXAMPLE_ENV", "Created example environment file", {
examplePath,
});
}
/**
@@ -571,7 +639,7 @@ PORT="3000"
warnings: [],
added: [],
deprecated: [],
updated: [],
};
const currentEnv = this.readCurrentEnvironment();
@@ -581,7 +649,9 @@ PORT="3000"
const value = currentEnv[config.key];
if (config.required && !value) {
result.errors.push(
`Required environment variable missing: ${config.key}`
);
}
if (value && config.validationRegex) {
@@ -604,56 +674,58 @@ if (import.meta.url === `file://${process.argv[1]}`) {
const command = process.argv[2];
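// Invocation sketch (assuming this file is wired up as a tsx entry point):
//   tsx <this-script> validate  -> read-only validation, exits 1 on errors
//   tsx <this-script>           -> full migration: writes the env template,
//                                  the migration guide, and .env.example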
if (command === "validate") {
migration
.validateEnvironmentConfiguration()
.then((result) => {
console.log("\n=== ENVIRONMENT VALIDATION RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
if (result.errors.length > 0) {
console.log("\n❌ ERRORS:");
result.errors.forEach((error) => console.log(` - ${error}`));
}
if (result.warnings.length > 0) {
console.log("\n⚠ WARNINGS:");
result.warnings.forEach((warning) => console.log(` - ${warning}`));
}
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error("Validation failed:", error);
process.exit(1);
});
} else {
migration
.migrateEnvironment()
.then((result) => {
console.log("\n=== ENVIRONMENT MIGRATION RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
console.log(`Added: ${result.added.length} variables`);
console.log(`Updated: ${result.updated.length} variables`);
console.log(`Deprecated: ${result.deprecated.length} variables`);
if (result.errors.length > 0) {
console.log("\n❌ ERRORS:");
result.errors.forEach((error) => console.log(` - ${error}`));
}
if (result.warnings.length > 0) {
console.log("\n⚠ WARNINGS:");
result.warnings.forEach((warning) => console.log(` - ${warning}`));
}
console.log("\n📋 Next Steps:");
console.log("1. Review ENVIRONMENT_MIGRATION_GUIDE.md");
console.log("2. Update your .env.local file with new variables");
console.log("3. Run: pnpm migration:validate-env");
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error("Migration failed:", error);
process.exit(1);
});
}
}
}


@@ -39,21 +39,39 @@ export class HealthChecker {
const checks: HealthCheckResult[] = [];
try {
migrationLogger.startStep("HEALTH_CHECKS", "Running comprehensive health checks");
migrationLogger.startStep(
"HEALTH_CHECKS",
"Running comprehensive health checks"
);
// Define all health checks
const healthChecks = [
{ name: "Database Connection", fn: () => this.checkDatabaseConnection() },
{
name: "Database Connection",
fn: () => this.checkDatabaseConnection(),
},
{ name: "Database Schema", fn: () => this.checkDatabaseSchema() },
{ name: "tRPC Endpoints", fn: () => this.checkTRPCEndpoints() },
{ name: "Batch Processing System", fn: () => this.checkBatchProcessingSystem() },
{
name: "Batch Processing System",
fn: () => this.checkBatchProcessingSystem(),
},
{ name: "OpenAI API Access", fn: () => this.checkOpenAIAccess() },
{ name: "Environment Configuration", fn: () => this.checkEnvironmentConfiguration() },
{
name: "Environment Configuration",
fn: () => this.checkEnvironmentConfiguration(),
},
{ name: "File System Access", fn: () => this.checkFileSystemAccess() },
{ name: "Memory Usage", fn: () => this.checkMemoryUsage() },
{ name: "CPU Usage", fn: () => this.checkCPUUsage() },
{ name: "Application Performance", fn: () => this.checkApplicationPerformance() },
{ name: "Security Configuration", fn: () => this.checkSecurityConfiguration() },
{
name: "Application Performance",
fn: () => this.checkApplicationPerformance(),
},
{
name: "Security Configuration",
fn: () => this.checkSecurityConfiguration(),
},
{ name: "Logging System", fn: () => this.checkLoggingSystem() },
];
@@ -64,8 +82,10 @@ export class HealthChecker {
}
const totalDuration = Date.now() - startTime;
const failedChecks = checks.filter((c) => !c.success).length;
const score = Math.round(
((checks.length - failedChecks) / checks.length) * 100
);
const result: SystemHealthResult = {
success: failedChecks === 0,
@@ -78,13 +98,19 @@ export class HealthChecker {
if (result.success) {
migrationLogger.completeStep("HEALTH_CHECKS");
} else {
migrationLogger.failStep("HEALTH_CHECKS", new Error(`${failedChecks} health checks failed`));
migrationLogger.failStep(
"HEALTH_CHECKS",
new Error(`${failedChecks} health checks failed`)
);
}
return result;
} catch (error) {
migrationLogger.error("HEALTH_CHECKS", "Health check system failed", error as Error);
migrationLogger.error(
"HEALTH_CHECKS",
"Health check system failed",
error as Error
);
throw error;
} finally {
await this.prisma.$disconnect();
@@ -93,7 +119,11 @@ export class HealthChecker {
private async runSingleHealthCheck(
name: string,
checkFn: () => Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}>
): Promise<HealthCheckResult> {
const startTime = Date.now();
@@ -112,16 +142,26 @@ export class HealthChecker {
};
if (result.success) {
migrationLogger.debug("HEALTH_CHECK", `${name} passed`, { duration, details: result.details });
migrationLogger.debug("HEALTH_CHECK", `${name} passed`, {
duration,
details: result.details,
});
} else {
migrationLogger.warn("HEALTH_CHECK", `${name} failed`, { duration, error: result.error?.message });
migrationLogger.warn("HEALTH_CHECK", `${name} failed`, {
duration,
error: result.error?.message,
});
}
return healthResult;
} catch (error) {
const duration = Date.now() - startTime;
migrationLogger.error("HEALTH_CHECK", `💥 ${name} crashed`, error as Error, { duration });
migrationLogger.error(
"HEALTH_CHECK",
`💥 ${name} crashed`,
error as Error,
{ duration }
);
return {
name,
@@ -132,7 +172,11 @@ export class HealthChecker {
}
}
private async checkDatabaseConnection(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const startTime = Date.now();
await this.prisma.$queryRaw`SELECT 1`;
@@ -149,19 +193,22 @@ export class HealthChecker {
success: connectionTests.length === 3,
details: {
queryTime,
connectionPoolTest: "passed"
}
connectionPoolTest: "passed",
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkDatabaseSchema(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Check critical tables
const tableChecks = await Promise.allSettled([
@@ -172,35 +219,40 @@ export class HealthChecker {
this.prisma.aIProcessingRequest.findFirst(),
]);
const failedTables = tableChecks.filter(
(result) => result.status === "rejected"
).length;
// Check for critical indexes
const indexCheck = await this.prisma.$queryRaw<{ count: string }[]>`
SELECT COUNT(*) as count
FROM pg_indexes
WHERE tablename IN ('Session', 'AIProcessingRequest', 'AIBatchRequest')
`;
const indexCount = parseInt(indexCheck[0]?.count || "0");
return {
success: failedTables === 0,
details: {
accessibleTables: tableChecks.length - failedTables,
totalTables: tableChecks.length,
indexes: indexCount,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkTRPCEndpoints(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const baseUrl = process.env.NEXTAUTH_URL || "http://localhost:3000";
@@ -224,8 +276,11 @@ export class HealthChecker {
);
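// 401/403 responses still count as success in the filter below: an auth
// rejection proves the tRPC route exists and is reachable, which is all
// this health check needs to establish.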
const successfulEndpoints = results.filter(
(result) =>
result.status === "fulfilled" &&
(result.value.status === 200 ||
result.value.status === 401 ||
result.value.status === 403)
).length;
return {
@@ -233,28 +288,32 @@ export class HealthChecker {
details: {
testedEndpoints: endpoints.length,
successfulEndpoints,
endpoints: results.map((r) =>
r.status === "fulfilled" ? r.value : { error: r.reason.message }
),
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkBatchProcessingSystem(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Check batch processing components
const batchEnabled = process.env.BATCH_PROCESSING_ENABLED === "true";
// Test database components
const batchRequestsCount = await this.prisma.aIBatchRequest.count();
const processingRequestsCount =
await this.prisma.aIProcessingRequest.count();
// Check if batch processor can be imported
let batchProcessorAvailable = false;
@@ -267,7 +326,7 @@ export class HealthChecker {
// Check batch status distribution
const batchStatuses = await this.prisma.aIBatchRequest.groupBy({
by: ["status"],
_count: { status: true },
});
@@ -279,20 +338,23 @@ export class HealthChecker {
batchRequests: batchRequestsCount,
processingRequests: processingRequestsCount,
statusDistribution: Object.fromEntries(
batchStatuses.map((s) => [s.status, s._count.status])
),
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkOpenAIAccess(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const apiKey = process.env.OPENAI_API_KEY;
const mockMode = process.env.OPENAI_MOCK_MODE === "true";
@@ -300,21 +362,21 @@ export class HealthChecker {
if (mockMode) {
return {
success: true,
details: { mode: "mock", available: true }
details: { mode: "mock", available: true },
};
}
if (!apiKey) {
return {
success: false,
error: new Error("OPENAI_API_KEY not configured")
error: new Error("OPENAI_API_KEY not configured"),
};
}
// Test API with a simple request
const response = await fetch("https://api.openai.com/v1/models", {
headers: {
"Authorization": `Bearer ${apiKey}`,
Authorization: `Bearer ${apiKey}`,
},
});
@@ -326,35 +388,36 @@ export class HealthChecker {
mode: "live",
available: response.ok,
status: response.status,
responseTime: responseTime,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkEnvironmentConfiguration(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const requiredVars = ["DATABASE_URL", "NEXTAUTH_SECRET", "NEXTAUTH_URL"];
const missingVars = requiredVars.filter(
(varName) => !process.env[varName]
);
const newVars = [
"BATCH_PROCESSING_ENABLED",
"TRPC_ENDPOINT_URL",
"BATCH_CREATE_INTERVAL"
"BATCH_CREATE_INTERVAL",
];
const missingNewVars = newVars.filter((varName) => !process.env[varName]);
return {
success: missingVars.length === 0,
@@ -364,19 +427,22 @@ export class HealthChecker {
newVarsPresent: newVars.length - missingNewVars.length,
totalNewVars: newVars.length,
missingRequired: missingVars,
missingNew: missingNewVars,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkFileSystemAccess(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const fs = await import("node:fs/promises");
const path = await import("node:path");
@@ -392,7 +458,9 @@ export class HealthChecker {
} catch (error) {
return {
success: false,
error: new Error(
`Cannot write to logs directory: ${(error as Error).message}`
),
};
}
@@ -402,7 +470,7 @@ export class HealthChecker {
} catch (error) {
return {
success: false,
error: new Error("Cannot access package.json")
error: new Error("Cannot access package.json"),
};
}
@@ -410,19 +478,22 @@ export class HealthChecker {
success: true,
details: {
logsWritable: true,
packageJsonReadable: true,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkMemoryUsage(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const memUsage = process.memoryUsage();
const usedMB = Math.round(memUsage.heapUsed / 1024 / 1024);
@@ -439,19 +510,22 @@ export class HealthChecker {
heapUsed: usedMB,
heapTotal: totalMB,
external: externalMB,
usagePercent: Math.round(usagePercent),
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkCPUUsage(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const cpuUsage = process.cpuUsage();
const userTime = cpuUsage.user / 1000; // Convert to milliseconds
@@ -459,7 +533,7 @@ export class HealthChecker {
// Simple CPU health check - process should be responsive
const startTime = Date.now();
await new Promise((resolve) => setTimeout(resolve, 10));
const responseTime = Date.now() - startTime;
return {
@@ -467,19 +541,22 @@ export class HealthChecker {
details: {
userTime,
systemTime,
responseTime,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkApplicationPerformance(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test database query performance
const dbStartTime = Date.now();
@@ -502,19 +579,22 @@ export class HealthChecker {
details: {
simpleQueryTime: dbQueryTime,
complexQueryTime: complexQueryTime,
performanceGood: dbQueryTime < 100 && complexQueryTime < 500,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkSecurityConfiguration(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
const securityIssues: string[] = [];
@@ -542,19 +622,22 @@ export class HealthChecker {
details: {
securityIssues,
hasSecret: !!secret,
rateLimitConfigured: !!process.env.RATE_LIMIT_WINDOW_MS,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
private async checkLoggingSystem(): Promise<{
success: boolean;
details?: Record<string, unknown>;
error?: Error;
}> {
try {
// Test if logging works
const testMessage = `Health check test ${Date.now()}`;
@@ -571,14 +654,13 @@ export class HealthChecker {
success: logsDirExists,
details: {
logsDirExists,
testMessageLogged: true,
},
};
} catch (error) {
return {
success: false,
error: error as Error,
};
}
}
@@ -590,26 +672,31 @@ export class HealthChecker {
const report = `
# System Health Report
**Overall Status**: ${result.success ? "✅ Healthy" : "❌ Unhealthy"}
**Health Score**: ${result.score}/100
**Total Duration**: ${result.totalDuration}ms
**Failed Checks**: ${result.failedChecks}/${result.checks.length}
## Health Check Results
${result.checks
.map(
(check) => `
### ${check.name}
- **Status**: ${check.success ? "✅ Pass" : "❌ Fail"}
- **Duration**: ${check.duration}ms
${check.details ? `- **Details**: ${JSON.stringify(check.details, null, 2)}` : ""}
${check.error ? `- **Error**: ${check.error.message}` : ""}
`
)
.join("")}
## Summary
${
result.success
? "🎉 All health checks passed! The system is operating normally."
: `⚠️ ${result.failedChecks} health check(s) failed. Please review and address the issues above.`
}
---
@@ -626,17 +713,22 @@ if (import.meta.url === `file://${process.argv[1]}`) {
const generateReport = process.argv.includes("--report");
healthChecker
.runHealthChecks()
.then((result) => {
console.log("\n=== SYSTEM HEALTH CHECK RESULTS ===");
console.log(
`Overall Health: ${result.success ? "✅ Healthy" : "❌ Unhealthy"}`
);
console.log(`Health Score: ${result.score}/100`);
console.log(`Total Duration: ${result.totalDuration}ms`);
console.log(
`Failed Checks: ${result.failedChecks}/${result.checks.length}`
);
console.log("\n=== INDIVIDUAL CHECKS ===");
for (const check of result.checks) {
const status = check.success ? "✅" : "❌";
console.log(`${status} ${check.name} (${check.duration}ms)`);
if (check.details) {
@@ -659,7 +751,7 @@ if (import.meta.url === `file://${process.argv[1]}`) {
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error("Health checks failed:", error);
process.exit(1);
});
}
}


@@ -92,8 +92,8 @@ Working Directory: ${process.cwd()}
// Console output with colors
const colors = {
[LogLevel.DEBUG]: "\x1b[36m", // Cyan
[LogLevel.INFO]: "\x1b[32m", // Green
[LogLevel.WARN]: "\x1b[33m", // Yellow
[LogLevel.INFO]: "\x1b[32m", // Green
[LogLevel.WARN]: "\x1b[33m", // Yellow
[LogLevel.ERROR]: "\x1b[31m", // Red
[LogLevel.CRITICAL]: "\x1b[35m", // Magenta
};
@@ -121,24 +121,50 @@ Working Directory: ${process.cwd()}
appendFileSync(this.logFile, logLine);
}
debug(
category: string,
message: string,
data?: Record<string, unknown>
): void {
this.writeLog(this.createLogEntry(LogLevel.DEBUG, category, message, data));
}
info(
category: string,
message: string,
data?: Record<string, unknown>
): void {
this.writeLog(this.createLogEntry(LogLevel.INFO, category, message, data));
}
warn(
category: string,
message: string,
data?: Record<string, unknown>
): void {
this.writeLog(this.createLogEntry(LogLevel.WARN, category, message, data));
}
error(
category: string,
message: string,
error?: Error,
data?: Record<string, unknown>
): void {
this.writeLog(
this.createLogEntry(LogLevel.ERROR, category, message, data, error)
);
}
critical(
category: string,
message: string,
error?: Error,
data?: Record<string, unknown>
): void {
this.writeLog(
this.createLogEntry(LogLevel.CRITICAL, category, message, data, error)
);
}
/**
@@ -159,7 +185,9 @@ Working Directory: ${process.cwd()}
return result;
} catch (error) {
const duration = Date.now() - startTime;
this.error(category, `Failed ${operationName}`, error as Error, {
duration,
});
throw error;
}
}
@@ -167,21 +195,35 @@ Working Directory: ${process.cwd()}
/**
* Create a progress tracker for long-running operations
*/
createProgressTracker(
category: string,
total: number,
operationName: string
) {
let completed = 0;
return {
increment: (count: number = 1) => {
completed += count;
const percentage = Math.round((completed / total) * 100);
this.info(
category,
`${operationName} progress: ${completed}/${total} (${percentage}%)`
);
},
complete: () => {
this.info(
category,
`${operationName} completed: ${completed}/${total}`
);
},
fail: (error: Error) => {
this.error(
category,
`${operationName} failed at ${completed}/${total}`,
error
);
},
};
}
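/**
 * Usage sketch (hypothetical call site, not part of this diff):
 *   const tracker = migrationLogger.createProgressTracker("IMPORT", 3, "Session import");
 *   tracker.increment();  // logs "Session import progress: 1/3 (33%)"
 *   tracker.increment(2); // logs "Session import progress: 3/3 (100%)"
 *   tracker.complete();   // logs "Session import completed: 3/3"
 */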
@@ -204,7 +246,9 @@ Working Directory: ${process.cwd()}
* Log migration phase transitions
*/
startPhase(phaseName: string, description?: string): void {
this.info("MIGRATION_PHASE", `📋 Starting Phase: ${phaseName}`, { description });
this.info("MIGRATION_PHASE", `📋 Starting Phase: ${phaseName}`, {
description,
});
}
completePhase(phaseName: string): void {
@@ -230,4 +274,4 @@ Time: ${new Date().toISOString()}
}
// Singleton instance for easy access
export const migrationLogger = new MigrationLogger();


@@ -44,24 +44,83 @@ export class PreDeploymentChecker {
const startTime = Date.now();
try {
migrationLogger.startPhase("PRE_DEPLOYMENT", "Running pre-deployment validation checks");
migrationLogger.startPhase(
"PRE_DEPLOYMENT",
"Running pre-deployment validation checks"
);
// Define all checks to run
const checkSuite = [
{ name: "Environment Configuration", fn: () => this.checkEnvironmentConfiguration(), critical: true },
{ name: "Database Connection", fn: () => this.checkDatabaseConnection(), critical: true },
{ name: "Database Schema", fn: () => this.checkDatabaseSchema(), critical: true },
{ name: "Database Data Integrity", fn: () => this.checkDataIntegrity(), critical: true },
{ name: "Dependencies", fn: () => this.checkDependencies(), critical: true },
{ name: "File System Permissions", fn: () => this.checkFileSystemPermissions(), critical: false },
{ name: "Port Availability", fn: () => this.checkPortAvailability(), critical: true },
{ name: "OpenAI API Access", fn: () => this.checkOpenAIAccess(), critical: true },
{ name: "tRPC Infrastructure", fn: () => this.checkTRPCInfrastructure(), critical: true },
{ name: "Batch Processing Readiness", fn: () => this.checkBatchProcessingReadiness(), critical: true },
{ name: "Security Configuration", fn: () => this.checkSecurityConfiguration(), critical: false },
{ name: "Performance Configuration", fn: () => this.checkPerformanceConfiguration(), critical: false },
{ name: "Backup Validation", fn: () => this.checkBackupValidation(), critical: false },
{ name: "Migration Rollback Readiness", fn: () => this.checkRollbackReadiness(), critical: false },
{
name: "Environment Configuration",
fn: () => this.checkEnvironmentConfiguration(),
critical: true,
},
{
name: "Database Connection",
fn: () => this.checkDatabaseConnection(),
critical: true,
},
{
name: "Database Schema",
fn: () => this.checkDatabaseSchema(),
critical: true,
},
{
name: "Database Data Integrity",
fn: () => this.checkDataIntegrity(),
critical: true,
},
{
name: "Dependencies",
fn: () => this.checkDependencies(),
critical: true,
},
{
name: "File System Permissions",
fn: () => this.checkFileSystemPermissions(),
critical: false,
},
{
name: "Port Availability",
fn: () => this.checkPortAvailability(),
critical: true,
},
{
name: "OpenAI API Access",
fn: () => this.checkOpenAIAccess(),
critical: true,
},
{
name: "tRPC Infrastructure",
fn: () => this.checkTRPCInfrastructure(),
critical: true,
},
{
name: "Batch Processing Readiness",
fn: () => this.checkBatchProcessingReadiness(),
critical: true,
},
{
name: "Security Configuration",
fn: () => this.checkSecurityConfiguration(),
critical: false,
},
{
name: "Performance Configuration",
fn: () => this.checkPerformanceConfiguration(),
critical: false,
},
{
name: "Backup Validation",
fn: () => this.checkBackupValidation(),
critical: false,
},
{
name: "Migration Rollback Readiness",
fn: () => this.checkRollbackReadiness(),
critical: false,
},
];
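// Note: only checks flagged critical: true can block deployment; the summary
// below counts failures among critical checks and requires that count to be
// zero, while non-critical failures are logged at warn level only.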
// Run all checks
@@ -70,8 +129,13 @@ export class PreDeploymentChecker {
}
const totalDuration = Date.now() - startTime;
const criticalFailures = this.checks.filter(
(c) => c.critical && !c.success
).length;
const warningCount = this.checks.reduce(
(sum, c) => sum + c.warnings.length,
0
);
const result: PreDeploymentResult = {
success: criticalFailures === 0,
@@ -84,13 +148,19 @@ export class PreDeploymentChecker {
if (result.success) {
migrationLogger.completePhase("PRE_DEPLOYMENT");
} else {
migrationLogger.error("PRE_DEPLOYMENT", `Pre-deployment checks failed with ${criticalFailures} critical failures`);
migrationLogger.error(
"PRE_DEPLOYMENT",
`Pre-deployment checks failed with ${criticalFailures} critical failures`
);
}
return result;
} catch (error) {
migrationLogger.error("PRE_DEPLOYMENT", "Pre-deployment check suite failed", error as Error);
migrationLogger.error(
"PRE_DEPLOYMENT",
"Pre-deployment check suite failed",
error as Error
);
throw error;
} finally {
await this.prisma.$disconnect();
@@ -99,7 +169,7 @@ export class PreDeploymentChecker {
private async runSingleCheck(
name: string,
checkFn: () => Promise<Omit<CheckResult, "name" | "duration">>,
critical: boolean
): Promise<void> {
const startTime = Date.now();
@@ -120,20 +190,29 @@ export class PreDeploymentChecker {
this.checks.push(checkResult);
if (result.success) {
migrationLogger.info("CHECK", `${name} passed`, { duration, warnings: result.warnings.length });
migrationLogger.info("CHECK", `${name} passed`, {
duration,
warnings: result.warnings.length,
});
} else {
const level = critical ? "ERROR" : "WARN";
migrationLogger[level.toLowerCase() as "error" | "warn"](
"CHECK",
`${name} failed`,
undefined,
{
errors: result.errors.length,
warnings: result.warnings.length,
duration,
}
);
}
if (result.warnings.length > 0) {
migrationLogger.warn("CHECK", `${name} has warnings`, { warnings: result.warnings });
migrationLogger.warn("CHECK", `${name} has warnings`, {
warnings: result.warnings,
});
}
} catch (error) {
const duration = Date.now() - startTime;
const checkResult: CheckResult = {
@@ -146,11 +225,15 @@ export class PreDeploymentChecker {
};
this.checks.push(checkResult);
migrationLogger.error("CHECK", `💥 ${name} crashed`, error as Error, { duration });
migrationLogger.error("CHECK", `💥 ${name} crashed`, error as Error, {
duration,
});
}
}
private async checkEnvironmentConfiguration(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -163,9 +246,9 @@ export class PreDeploymentChecker {
// Additional environment checks
const requiredVars = [
"DATABASE_URL",
"NEXTAUTH_SECRET",
"OPENAI_API_KEY",
];
for (const varName of requiredVars) {
@@ -175,17 +258,13 @@ export class PreDeploymentChecker {
}
// Check new variables
const newVars = ["BATCH_PROCESSING_ENABLED", "TRPC_ENDPOINT_URL"];
for (const varName of newVars) {
if (!process.env[varName]) {
warnings.push(`New environment variable not set: ${varName}`);
}
}
} catch (error) {
errors.push(`Environment validation failed: ${(error as Error).message}`);
}
@@ -197,7 +276,9 @@ export class PreDeploymentChecker {
};
}
private async checkDatabaseConnection(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -215,7 +296,6 @@ export class PreDeploymentChecker {
if (connections.length !== 3) {
warnings.push("Connection pooling may have issues");
}
} catch (error) {
errors.push(`Database connection failed: ${(error as Error).message}`);
}
@@ -227,7 +307,9 @@ export class PreDeploymentChecker {
};
}
private async checkDatabaseSchema(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const validator = new DatabaseValidator();
try {
@@ -247,7 +329,9 @@ export class PreDeploymentChecker {
}
}
private async checkDataIntegrity(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -257,11 +341,13 @@ export class PreDeploymentChecker {
const importCount = await this.prisma.sessionImport.count();
if (sessionCount === 0 && importCount === 0) {
warnings.push("No session data found - this may be a fresh installation");
warnings.push(
"No session data found - this may be a fresh installation"
);
}
// Check for orphaned processing status records
const orphanedStatus = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count
FROM "SessionProcessingStatus" sps
LEFT JOIN "Session" s ON sps."sessionId" = s.id
@@ -269,9 +355,10 @@ export class PreDeploymentChecker {
`;
if (orphanedStatus[0]?.count > 0) {
warnings.push(
`Found ${orphanedStatus[0].count} orphaned processing status records`
);
}
} catch (error) {
errors.push(`Data integrity check failed: ${(error as Error).message}`);
}
@@ -283,7 +370,9 @@ export class PreDeploymentChecker {
};
}
private async checkDependencies(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -307,19 +396,21 @@ export class PreDeploymentChecker {
];
for (const dep of requiredDeps) {
if (
!packageJson.dependencies?.[dep] &&
!packageJson.devDependencies?.[dep]
) {
errors.push(`Missing required dependency: ${dep}`);
}
}
// Check Node.js version
const nodeVersion = process.version;
const majorVersion = parseInt(nodeVersion.slice(1).split(".")[0]);
if (majorVersion < 18) {
errors.push(`Node.js ${nodeVersion} is too old. Requires Node.js 18+`);
}
} catch (error) {
errors.push(`Dependency check failed: ${(error as Error).message}`);
}
@@ -331,7 +422,9 @@ export class PreDeploymentChecker {
};
}
private async checkFileSystemPermissions(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -346,7 +439,9 @@ export class PreDeploymentChecker {
await fs.writeFile(testFile, "test");
await fs.unlink(testFile);
} catch (error) {
errors.push(
`Cannot write to logs directory: ${(error as Error).message}`
);
}
// Check if we can write to backups directory
@@ -357,11 +452,14 @@ export class PreDeploymentChecker {
await fs.writeFile(testFile, "test");
await fs.unlink(testFile);
} catch (error) {
warnings.push(
`Cannot write to backups directory: ${(error as Error).message}`
);
}
} catch (error) {
errors.push(
`File system permission check failed: ${(error as Error).message}`
);
}
return {
@@ -371,7 +469,9 @@ export class PreDeploymentChecker {
};
}
private async checkPortAvailability(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -396,9 +496,10 @@ export class PreDeploymentChecker {
resolve();
});
});
} catch (error) {
errors.push(
`Port availability check failed: ${(error as Error).message}`
);
}
return {
@@ -408,7 +509,9 @@ export class PreDeploymentChecker {
};
}
private async checkOpenAIAccess(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -423,19 +526,20 @@ export class PreDeploymentChecker {
// Test API access (simple models list call)
const response = await fetch("https://api.openai.com/v1/models", {
headers: {
"Authorization": `Bearer ${apiKey}`,
Authorization: `Bearer ${apiKey}`,
},
});
if (!response.ok) {
errors.push(
`OpenAI API access failed: ${response.status} ${response.statusText}`
);
} else {
const data = await response.json();
if (!data.data || !Array.isArray(data.data)) {
warnings.push("OpenAI API returned unexpected response format");
}
}
} catch (error) {
errors.push(`OpenAI API check failed: ${(error as Error).message}`);
}
@@ -447,7 +551,9 @@ export class PreDeploymentChecker {
};
}
private async checkTRPCInfrastructure(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -475,9 +581,10 @@ export class PreDeploymentChecker {
} catch (error) {
errors.push(`Cannot import tRPC router: ${(error as Error).message}`);
}
} catch (error) {
errors.push(
`tRPC infrastructure check failed: ${(error as Error).message}`
);
}
return {
@@ -487,16 +594,15 @@ export class PreDeploymentChecker {
};
}
private async checkBatchProcessingReadiness(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
try {
// Check if batch processing files exist
const batchFiles = ["lib/batchProcessor.ts", "lib/batchScheduler.ts"];
for (const file of batchFiles) {
const fullPath = join(process.cwd(), file);
@@ -506,29 +612,32 @@ export class PreDeploymentChecker {
}
// Check database readiness for batch processing
const batchTableExists = await this.prisma.$queryRaw<{ count: string }[]>`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_name = 'AIBatchRequest'
`;
if (parseInt(batchTableExists[0]?.count || "0") === 0) {
errors.push("AIBatchRequest table not found");
}
// Check if batch status enum exists
const batchStatusExists = await this.prisma.$queryRaw<
{ count: string }[]
>`
SELECT COUNT(*) as count
FROM pg_type
WHERE typname = 'AIBatchRequestStatus'
`;
if (parseInt(batchStatusExists[0]?.count || "0") === 0) {
errors.push("AIBatchRequestStatus enum not found");
}
} catch (error) {
errors.push(
`Batch processing readiness check failed: ${(error as Error).message}`
);
}
return {
@@ -538,7 +647,9 @@ export class PreDeploymentChecker {
};
}
private async checkSecurityConfiguration(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -556,13 +667,17 @@ export class PreDeploymentChecker {
// Check if we're running in production mode with proper settings
if (process.env.NODE_ENV === "production") {
if (
!process.env.NEXTAUTH_URL ||
process.env.NEXTAUTH_URL.includes("localhost")
) {
warnings.push("NEXTAUTH_URL should not use localhost in production");
}
}
} catch (error) {
warnings.push(
`Security configuration check failed: ${(error as Error).message}`
);
}
return {
@@ -572,31 +687,44 @@ export class PreDeploymentChecker {
};
}
private async checkPerformanceConfiguration(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
try {
// Check database connection limits
const connectionLimit = parseInt(
process.env.DATABASE_CONNECTION_LIMIT || "20"
);
if (connectionLimit < 10) {
warnings.push("DATABASE_CONNECTION_LIMIT may be too low for production");
warnings.push(
"DATABASE_CONNECTION_LIMIT may be too low for production"
);
}
// Check batch processing configuration
const batchMaxRequests = parseInt(
process.env.BATCH_MAX_REQUESTS || "1000"
);
if (batchMaxRequests > 50000) {
warnings.push("BATCH_MAX_REQUESTS exceeds OpenAI limits");
}
// Check session processing concurrency
const concurrency = parseInt(
process.env.SESSION_PROCESSING_CONCURRENCY || "5"
);
if (concurrency > 10) {
warnings.push("High SESSION_PROCESSING_CONCURRENCY may overwhelm the system");
warnings.push(
"High SESSION_PROCESSING_CONCURRENCY may overwhelm the system"
);
}
} catch (error) {
warnings.push(
`Performance configuration check failed: ${(error as Error).message}`
);
}
return {
@@ -606,7 +734,9 @@ export class PreDeploymentChecker {
};
}
private async checkBackupValidation(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -625,7 +755,6 @@ export class PreDeploymentChecker {
if (!existsSync(backupDir)) {
warnings.push("Backup directory does not exist");
}
} catch (error) {
warnings.push(`Backup validation failed: ${(error as Error).message}`);
}
@@ -637,7 +766,9 @@ export class PreDeploymentChecker {
};
}
private async checkRollbackReadiness(): Promise<
Omit<CheckResult, "name" | "duration">
> {
const errors: string[] = [];
const warnings: string[] = [];
@@ -659,9 +790,10 @@ export class PreDeploymentChecker {
if (process.env.MIGRATION_ROLLBACK_ENABLED !== "true") {
warnings.push("Rollback is disabled - consider enabling for safety");
}
} catch (error) {
warnings.push(
`Rollback readiness check failed: ${(error as Error).message}`
);
}
return {
@@ -676,41 +808,46 @@ export class PreDeploymentChecker {
if (import.meta.url === `file://${process.argv[1]}`) {
const checker = new PreDeploymentChecker();
checker
.runAllChecks()
.then((result) => {
console.log("\n=== PRE-DEPLOYMENT CHECK RESULTS ===");
console.log(`Overall Success: ${result.success ? "✅" : "❌"}`);
console.log(`Total Duration: ${result.totalDuration}ms`);
console.log(`Critical Failures: ${result.criticalFailures}`);
console.log(`Total Warnings: ${result.warningCount}`);
console.log("\n=== INDIVIDUAL CHECKS ===");
for (const check of result.checks) {
const status = check.success ? "✅" : "❌";
const critical = check.critical ? " (CRITICAL)" : "";
console.log(`${status} ${check.name}${critical} (${check.duration}ms)`);
if (check.errors.length > 0) {
check.errors.forEach(error => console.log(`${error}`));
check.errors.forEach((error) => console.log(`${error}`));
}
if (check.warnings.length > 0) {
check.warnings.forEach(warning => console.log(` ⚠️ ${warning}`));
check.warnings.forEach((warning) => console.log(` ⚠️ ${warning}`));
}
}
if (!result.success) {
console.log('\n❌ DEPLOYMENT BLOCKED - Fix critical issues before proceeding');
console.log(
"\n❌ DEPLOYMENT BLOCKED - Fix critical issues before proceeding"
);
} else if (result.warningCount > 0) {
console.log('\n⚠️ DEPLOYMENT ALLOWED - Review warnings before proceeding');
console.log(
"\n⚠️ DEPLOYMENT ALLOWED - Review warnings before proceeding"
);
} else {
console.log('\n✅ DEPLOYMENT READY - All checks passed');
console.log("\n✅ DEPLOYMENT READY - All checks passed");
}
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('Pre-deployment checks failed:', error);
console.error("Pre-deployment checks failed:", error);
process.exit(1);
});
}
}
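
A note on the pattern above: each performance check parses an integer environment variable with a fallback, compares it to a threshold, and records a warning rather than a hard error, so deployment is never blocked on tuning values. A minimal standalone sketch of that shape (the helper name and the consolidated loop are illustrative, not part of the codebase; the thresholds restate the three checks shown above):

function checkIntEnv(
  name: string,
  fallback: number,
  bounds: { min?: number; max?: number }
): string | null {
  const value = parseInt(process.env[name] || String(fallback), 10);
  if (bounds.min !== undefined && value < bounds.min) {
    return `${name} (${value}) is below the recommended minimum of ${bounds.min}`;
  }
  if (bounds.max !== undefined && value > bounds.max) {
    return `${name} (${value}) exceeds the recommended maximum of ${bounds.max}`;
  }
  return null; // within bounds, no warning
}

// Mirrors the three checks above:
const tuningWarnings = [
  checkIntEnv("DATABASE_CONNECTION_LIMIT", 20, { min: 10 }),
  checkIntEnv("BATCH_MAX_REQUESTS", 1000, { max: 50000 }),
  checkIntEnv("SESSION_PROCESSING_CONCURRENCY", 5, { max: 10 }),
].filter((w): w is string => w !== null);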

View File

@@ -78,7 +78,7 @@ export class RollbackManager {
migrationLogger.completePhase("ROLLBACK");
migrationLogger.info("ROLLBACK", "Rollback completed successfully", {
totalDuration,
steps: this.completedSteps.length
steps: this.completedSteps.length,
});
return {
@@ -86,7 +86,6 @@ export class RollbackManager {
completedSteps: this.completedSteps,
totalDuration,
};
} catch (error) {
const totalDuration = Date.now() - startTime;
@@ -105,7 +104,10 @@ export class RollbackManager {
* Create rollback snapshot before deployment
*/
async createRollbackSnapshot(): Promise<string> {
migrationLogger.startStep("ROLLBACK_SNAPSHOT", "Creating rollback snapshot");
migrationLogger.startStep(
"ROLLBACK_SNAPSHOT",
"Creating rollback snapshot"
);
try {
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
@@ -127,10 +129,11 @@ export class RollbackManager {
await this.saveDeploymentState(snapshotDir);
migrationLogger.completeStep("ROLLBACK_SNAPSHOT");
migrationLogger.info("ROLLBACK_SNAPSHOT", "Rollback snapshot created", { snapshotDir });
migrationLogger.info("ROLLBACK_SNAPSHOT", "Rollback snapshot created", {
snapshotDir,
});
return snapshotDir;
} catch (error) {
migrationLogger.failStep("ROLLBACK_SNAPSHOT", error as Error);
throw error;
@@ -194,7 +197,10 @@ export class RollbackManager {
if (this.options.rollbackEnvironment) {
await this.rollbackEnvironment();
} else {
migrationLogger.info("ENV_ROLLBACK", "Environment rollback skipped");
migrationLogger.info(
"ENV_ROLLBACK",
"Environment rollback skipped"
);
}
},
},
@@ -230,11 +236,14 @@ export class RollbackManager {
private async executeRollbackStep(step: RollbackStep): Promise<void> {
try {
migrationLogger.startStep(step.name.replace(/\s+/g, '_').toUpperCase(), step.description);
migrationLogger.startStep(
step.name.replace(/\s+/g, "_").toUpperCase(),
step.description
);
if (this.options.dryRun) {
migrationLogger.info("DRY_RUN", `Would execute rollback: ${step.name}`);
await new Promise(resolve => setTimeout(resolve, 100));
await new Promise((resolve) => setTimeout(resolve, 100));
} else {
await step.execute();
}
@@ -243,51 +252,66 @@ export class RollbackManager {
if (step.verify && !this.options.dryRun) {
const verified = await step.verify();
if (!verified) {
throw new Error(`Verification failed for rollback step: ${step.name}`);
throw new Error(
`Verification failed for rollback step: ${step.name}`
);
}
}
migrationLogger.completeStep(step.name.replace(/\s+/g, '_').toUpperCase());
migrationLogger.completeStep(
step.name.replace(/\s+/g, "_").toUpperCase()
);
} catch (error) {
migrationLogger.failStep(step.name.replace(/\s+/g, '_').toUpperCase(), error as Error);
migrationLogger.failStep(
step.name.replace(/\s+/g, "_").toUpperCase(),
error as Error
);
if (step.critical) {
throw error;
} else {
migrationLogger.warn("ROLLBACK_STEP", `Non-critical rollback step failed: ${step.name}`, {
error: (error as Error).message
});
migrationLogger.warn(
"ROLLBACK_STEP",
`Non-critical rollback step failed: ${step.name}`,
{
error: (error as Error).message,
}
);
}
}
}
private async confirmRollback(): Promise<void> {
console.log('\n⚠️ ROLLBACK CONFIRMATION REQUIRED ⚠️');
console.log('This will restore the system to a previous state.');
console.log('The following actions will be performed:');
console.log("\n⚠ ROLLBACK CONFIRMATION REQUIRED ⚠️");
console.log("This will restore the system to a previous state.");
console.log("The following actions will be performed:");
if (this.options.rollbackDatabase) {
console.log(' - Restore database from backup');
console.log(" - Restore database from backup");
}
if (this.options.rollbackCode) {
console.log(' - Restore application code to previous version');
console.log(" - Restore application code to previous version");
}
if (this.options.rollbackEnvironment) {
console.log(' - Restore environment configuration');
console.log(" - Restore environment configuration");
}
console.log('\nThis operation cannot be easily undone.');
console.log("\nThis operation cannot be easily undone.");
// In a real implementation, you would prompt for user input
// For automation purposes, we'll check for a confirmation flag
if (!process.env.ROLLBACK_CONFIRMED) {
throw new Error('Rollback not confirmed. Set ROLLBACK_CONFIRMED=true to proceed.');
throw new Error(
"Rollback not confirmed. Set ROLLBACK_CONFIRMED=true to proceed."
);
}
}
private async validateRollbackPrerequisites(): Promise<void> {
migrationLogger.info("ROLLBACK_VALIDATION", "Validating rollback prerequisites");
migrationLogger.info(
"ROLLBACK_VALIDATION",
"Validating rollback prerequisites"
);
// Check if backup exists
if (this.options.rollbackDatabase && this.options.backupPath) {
@@ -301,7 +325,9 @@ export class RollbackManager {
try {
execSync("pg_restore --version", { stdio: "ignore" });
} catch (error) {
throw new Error("pg_restore not found - database rollback not possible");
throw new Error(
"pg_restore not found - database rollback not possible"
);
}
}
@@ -314,7 +340,10 @@ export class RollbackManager {
}
}
migrationLogger.info("ROLLBACK_VALIDATION", "Prerequisites validated successfully");
migrationLogger.info(
"ROLLBACK_VALIDATION",
"Prerequisites validated successfully"
);
}
private async stopServices(): Promise<void> {
@@ -322,18 +351,24 @@ export class RollbackManager {
// In a real deployment, this would stop the actual services
// For this implementation, we'll simulate service stopping
await new Promise(resolve => setTimeout(resolve, 1000));
await new Promise((resolve) => setTimeout(resolve, 1000));
migrationLogger.info("SERVICE_STOP", "Services stopped successfully");
}
private async rollbackDatabase(): Promise<void> {
if (!this.options.backupPath) {
migrationLogger.warn("DB_ROLLBACK", "No backup path specified, skipping database rollback");
migrationLogger.warn(
"DB_ROLLBACK",
"No backup path specified, skipping database rollback"
);
return;
}
migrationLogger.info("DB_ROLLBACK", `Restoring database from backup: ${this.options.backupPath}`);
migrationLogger.info(
"DB_ROLLBACK",
`Restoring database from backup: ${this.options.backupPath}`
);
try {
// Parse database URL
@@ -345,19 +380,26 @@ export class RollbackManager {
const parsed = new URL(dbUrl);
// Drop existing connections
migrationLogger.info("DB_ROLLBACK", "Terminating existing database connections");
migrationLogger.info(
"DB_ROLLBACK",
"Terminating existing database connections"
);
// Restore from backup
const restoreCommand = [
"pg_restore",
"-h", parsed.hostname,
"-p", parsed.port || "5432",
"-U", parsed.username,
"-d", parsed.pathname.slice(1),
"-h",
parsed.hostname,
"-p",
parsed.port || "5432",
"-U",
parsed.username,
"-d",
parsed.pathname.slice(1),
"--clean",
"--if-exists",
"--verbose",
this.options.backupPath
this.options.backupPath,
].join(" ");
migrationLogger.debug("DB_ROLLBACK", `Executing: ${restoreCommand}`);
@@ -370,8 +412,10 @@ export class RollbackManager {
stdio: "pipe",
});
migrationLogger.info("DB_ROLLBACK", "Database rollback completed successfully");
migrationLogger.info(
"DB_ROLLBACK",
"Database rollback completed successfully"
);
} catch (error) {
throw new Error(`Database rollback failed: ${(error as Error).message}`);
}
@@ -393,12 +437,19 @@ export class RollbackManager {
return true;
} catch (error) {
await prisma.$disconnect();
migrationLogger.error("DB_VERIFY", "Database verification failed", error as Error);
migrationLogger.error(
"DB_VERIFY",
"Database verification failed",
error as Error
);
return false;
}
} catch (error) {
migrationLogger.error("DB_VERIFY", "Database verification error", error as Error);
migrationLogger.error(
"DB_VERIFY",
"Database verification error",
error as Error
);
return false;
}
}
@@ -409,55 +460,73 @@ export class RollbackManager {
try {
// Get the previous commit (this is a simplified approach)
const previousCommit = execSync("git rev-parse HEAD~1", {
encoding: "utf8"
encoding: "utf8",
}).trim();
migrationLogger.info("CODE_ROLLBACK", `Rolling back to commit: ${previousCommit}`);
migrationLogger.info(
"CODE_ROLLBACK",
`Rolling back to commit: ${previousCommit}`
);
// Reset to previous commit
execSync(`git reset --hard ${previousCommit}`, { stdio: "pipe" });
migrationLogger.info("CODE_ROLLBACK", "Code rollback completed successfully");
migrationLogger.info(
"CODE_ROLLBACK",
"Code rollback completed successfully"
);
} catch (error) {
throw new Error(`Code rollback failed: ${(error as Error).message}`);
}
}
private async rollbackEnvironment(): Promise<void> {
migrationLogger.info("ENV_ROLLBACK", "Rolling back environment configuration");
migrationLogger.info(
"ENV_ROLLBACK",
"Rolling back environment configuration"
);
try {
// Look for environment backup
const backupFiles = [
".env.local.backup",
".env.backup",
".env.production.backup"
".env.production.backup",
];
let restored = false;
for (const backupFile of backupFiles) {
const backupPath = join(process.cwd(), backupFile);
const targetPath = backupPath.replace('.backup', '');
const targetPath = backupPath.replace(".backup", "");
if (existsSync(backupPath)) {
const backupContent = readFileSync(backupPath, "utf8");
writeFileSync(targetPath, backupContent);
migrationLogger.info("ENV_ROLLBACK", `Restored ${targetPath} from ${backupFile}`);
migrationLogger.info(
"ENV_ROLLBACK",
`Restored ${targetPath} from ${backupFile}`
);
restored = true;
}
}
if (!restored) {
migrationLogger.warn("ENV_ROLLBACK", "No environment backup found to restore");
migrationLogger.warn(
"ENV_ROLLBACK",
"No environment backup found to restore"
);
} else {
migrationLogger.info("ENV_ROLLBACK", "Environment rollback completed successfully");
migrationLogger.info(
"ENV_ROLLBACK",
"Environment rollback completed successfully"
);
}
} catch (error) {
throw new Error(`Environment rollback failed: ${(error as Error).message}`);
throw new Error(
`Environment rollback failed: ${(error as Error).message}`
);
}
}
@@ -472,24 +541,34 @@ export class RollbackManager {
if (existsSync(packageLockBackup)) {
const backupContent = readFileSync(packageLockBackup, "utf8");
writeFileSync(packageLock, backupContent);
migrationLogger.info("DEPS_RESTORE", "Restored package-lock.json from backup");
migrationLogger.info(
"DEPS_RESTORE",
"Restored package-lock.json from backup"
);
}
// Reinstall dependencies
execSync("npm ci", { stdio: "pipe" });
migrationLogger.info("DEPS_RESTORE", "Dependencies restored successfully");
migrationLogger.info(
"DEPS_RESTORE",
"Dependencies restored successfully"
);
} catch (error) {
throw new Error(`Dependencies restoration failed: ${(error as Error).message}`);
throw new Error(
`Dependencies restoration failed: ${(error as Error).message}`
);
}
}
private async restartServices(): Promise<void> {
migrationLogger.info("SERVICE_RESTART", "Restarting services after rollback");
migrationLogger.info(
"SERVICE_RESTART",
"Restarting services after rollback"
);
// In a real deployment, this would restart the actual services
await new Promise(resolve => setTimeout(resolve, 2000));
await new Promise((resolve) => setTimeout(resolve, 2000));
migrationLogger.info("SERVICE_RESTART", "Services restarted successfully");
}
@@ -508,10 +587,14 @@ export class RollbackManager {
// Test basic application functionality
// This would typically involve checking key endpoints or services
migrationLogger.info("ROLLBACK_VERIFY", "Rollback verification successful");
migrationLogger.info(
"ROLLBACK_VERIFY",
"Rollback verification successful"
);
} catch (error) {
throw new Error(`Rollback verification failed: ${(error as Error).message}`);
throw new Error(
`Rollback verification failed: ${(error as Error).message}`
);
}
}
@@ -532,7 +615,11 @@ export class RollbackManager {
private async savePackageSnapshot(snapshotDir: string): Promise<void> {
const fs = await import("node:fs/promises");
const packageFiles = ["package.json", "package-lock.json", "pnpm-lock.yaml"];
const packageFiles = [
"package.json",
"package-lock.json",
"pnpm-lock.yaml",
];
for (const packageFile of packageFiles) {
const packagePath = join(process.cwd(), packageFile);
@@ -547,7 +634,9 @@ export class RollbackManager {
try {
const gitInfo = {
commit: execSync("git rev-parse HEAD", { encoding: "utf8" }).trim(),
branch: execSync("git rev-parse --abbrev-ref HEAD", { encoding: "utf8" }).trim(),
branch: execSync("git rev-parse --abbrev-ref HEAD", {
encoding: "utf8",
}).trim(),
status: execSync("git status --porcelain", { encoding: "utf8" }).trim(),
remotes: execSync("git remote -v", { encoding: "utf8" }).trim(),
};
@@ -557,10 +646,9 @@ export class RollbackManager {
join(snapshotDir, "git-info.json"),
JSON.stringify(gitInfo, null, 2)
);
} catch (error) {
migrationLogger.warn("GIT_SNAPSHOT", "Failed to save git snapshot", {
error: (error as Error).message
error: (error as Error).message,
});
}
}
@@ -617,29 +705,31 @@ if (import.meta.url === `file://${process.argv[1]}`) {
if (command === "snapshot") {
const rollbackManager = new RollbackManager();
rollbackManager.createRollbackSnapshot()
rollbackManager
.createRollbackSnapshot()
.then((snapshotDir) => {
console.log('\n=== ROLLBACK SNAPSHOT CREATED ===');
console.log("\n=== ROLLBACK SNAPSHOT CREATED ===");
console.log(`Snapshot Directory: ${snapshotDir}`);
console.log('\nThe snapshot contains:');
console.log(' - Environment configuration');
console.log(' - Package dependencies');
console.log(' - Git information');
console.log(' - Deployment state');
console.log('\nUse this snapshot for rollback if needed.');
console.log("\nThe snapshot contains:");
console.log(" - Environment configuration");
console.log(" - Package dependencies");
console.log(" - Git information");
console.log(" - Deployment state");
console.log("\nUse this snapshot for rollback if needed.");
process.exit(0);
})
.catch((error) => {
console.error('Snapshot creation failed:', error);
console.error("Snapshot creation failed:", error);
process.exit(1);
});
} else {
const rollbackManager = new RollbackManager(options);
rollbackManager.rollback()
rollbackManager
.rollback()
.then((result) => {
console.log('\n=== ROLLBACK RESULTS ===');
console.log(`Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== ROLLBACK RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
console.log(`Total Duration: ${result.totalDuration}ms`);
console.log(`Completed Steps: ${result.completedSteps.length}`);
@@ -651,28 +741,28 @@ if (import.meta.url === `file://${process.argv[1]}`) {
console.error(`Error: ${result.error.message}`);
}
console.log('\nCompleted Steps:');
result.completedSteps.forEach(step => console.log(`${step}`));
console.log("\nCompleted Steps:");
result.completedSteps.forEach((step) => console.log(`${step}`));
if (result.success) {
console.log('\n🎉 ROLLBACK SUCCESSFUL!');
console.log('\nNext Steps:');
console.log('1. Verify system functionality');
console.log('2. Monitor logs for any issues');
console.log('3. Investigate root cause of deployment failure');
console.log("\n🎉 ROLLBACK SUCCESSFUL!");
console.log("\nNext Steps:");
console.log("1. Verify system functionality");
console.log("2. Monitor logs for any issues");
console.log("3. Investigate root cause of deployment failure");
} else {
console.log('\n💥 ROLLBACK FAILED!');
console.log('\nNext Steps:');
console.log('1. Check logs for error details');
console.log('2. Manual intervention may be required');
console.log('3. Contact system administrators');
console.log("\n💥 ROLLBACK FAILED!");
console.log("\nNext Steps:");
console.log("1. Check logs for error details");
console.log("2. Manual intervention may be required");
console.log("3. Contact system administrators");
}
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('Rollback failed:', error);
console.error("Rollback failed:", error);
process.exit(1);
});
}
}
}
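
The database rollback above derives every pg_restore flag from DATABASE_URL via the WHATWG URL parser. A self-contained sketch of just that step, with a placeholder connection string and backup path (not real values):

// Placeholder URL; the real value comes from process.env.DATABASE_URL.
const dbUrl = new URL("postgresql://user:secret@db.example.com:5432/livedash");

const restoreCommand = [
  "pg_restore",
  "-h", dbUrl.hostname,
  "-p", dbUrl.port || "5432",    // default PostgreSQL port when omitted
  "-U", dbUrl.username,
  "-d", dbUrl.pathname.slice(1), // strip the leading "/" to get the db name
  "--clean",                     // drop existing objects before restoring
  "--if-exists",                 // tolerate objects that are already gone
  "/backups/snapshot.dump",      // placeholder backup path
].join(" ");

console.log(restoreCommand);

One caveat worth keeping in mind: the password travels in the URL, but pg_restore does not accept it on the command line; it reads PGPASSWORD or .pgpass, so a real invocation has to provide it separately.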

View File

@@ -40,7 +40,8 @@ export class TRPCEndpointTester {
private timeout: number;
constructor(baseUrl?: string, timeout: number = 30000) {
this.baseUrl = baseUrl || process.env.NEXTAUTH_URL || "http://localhost:3000";
this.baseUrl =
baseUrl || process.env.NEXTAUTH_URL || "http://localhost:3000";
this.timeout = timeout;
}
@@ -52,7 +53,10 @@ export class TRPCEndpointTester {
const tests: TestResult[] = [];
try {
migrationLogger.startStep("TRPC_TESTS", "Running tRPC endpoint validation tests");
migrationLogger.startStep(
"TRPC_TESTS",
"Running tRPC endpoint validation tests"
);
// Define test suite
const endpointTests: EndpointTest[] = [
@@ -86,8 +90,8 @@ export class TRPCEndpointTester {
json: {
page: 1,
pageSize: 10,
filters: {}
}
filters: {},
},
},
expectedStatuses: [200, 401, 403],
timeout: 10000,
@@ -155,9 +159,12 @@ export class TRPCEndpointTester {
}
const totalDuration = Date.now() - startTime;
const passedTests = tests.filter(t => t.success).length;
const failedTests = tests.filter(t => !t.success).length;
const criticalFailures = tests.filter(t => !t.success && endpointTests.find(et => et.name === t.name)?.critical).length;
const passedTests = tests.filter((t) => t.success).length;
const failedTests = tests.filter((t) => !t.success).length;
const criticalFailures = tests.filter(
(t) =>
!t.success && endpointTests.find((et) => et.name === t.name)?.critical
).length;
const result: TRPCTestResult = {
success: criticalFailures === 0,
@@ -171,13 +178,19 @@ export class TRPCEndpointTester {
if (result.success) {
migrationLogger.completeStep("TRPC_TESTS");
} else {
migrationLogger.failStep("TRPC_TESTS", new Error(`${criticalFailures} critical tRPC tests failed`));
migrationLogger.failStep(
"TRPC_TESTS",
new Error(`${criticalFailures} critical tRPC tests failed`)
);
}
return result;
} catch (error) {
migrationLogger.error("TRPC_TESTS", "tRPC test suite failed", error as Error);
migrationLogger.error(
"TRPC_TESTS",
"tRPC test suite failed",
error as Error
);
throw error;
}
}
@@ -226,22 +239,26 @@ export class TRPCEndpointTester {
if (success) {
migrationLogger.debug("TRPC_TEST", `${test.name} passed`, {
status: response.status,
duration
duration,
});
} else {
migrationLogger.warn("TRPC_TEST", `${test.name} failed`, {
status: response.status,
expected: test.expectedStatuses,
duration
duration,
});
}
return result;
} catch (error) {
const duration = Date.now() - startTime;
migrationLogger.error("TRPC_TEST", `💥 ${test.name} crashed`, error as Error, { duration });
migrationLogger.error(
"TRPC_TEST",
`💥 ${test.name} crashed`,
error as Error,
{ duration }
);
return {
name: test.name,
@@ -296,7 +313,8 @@ export class TRPCEndpointTester {
const responseData = await response.json();
// Batch requests should return an array of responses
const success = response.ok && Array.isArray(responseData) && responseData.length === 2;
const success =
response.ok && Array.isArray(responseData) && responseData.length === 2;
return {
name: "tRPC Batch Requests",
@@ -305,7 +323,6 @@ export class TRPCEndpointTester {
duration,
response: responseData,
};
} catch (error) {
const duration = Date.now() - startTime;
@@ -367,7 +384,6 @@ export class TRPCEndpointTester {
error: new Error("WebSocket connection failed"),
});
};
} catch (error) {
resolve({
name: "tRPC Subscriptions",
@@ -378,7 +394,6 @@ export class TRPCEndpointTester {
});
}
});
} catch (error) {
const duration = Date.now() - startTime;
@@ -399,7 +414,7 @@ export class TRPCEndpointTester {
const report = `
# tRPC Endpoint Test Report
**Overall Status**: ${result.success ? '✅ All Critical Tests Passed' : '❌ Critical Tests Failed'}
**Overall Status**: ${result.success ? "✅ All Critical Tests Passed" : "❌ Critical Tests Failed"}
**Total Duration**: ${result.totalDuration}ms
**Passed Tests**: ${result.passedTests}/${result.tests.length}
**Failed Tests**: ${result.failedTests}/${result.tests.length}
@@ -407,29 +422,41 @@ export class TRPCEndpointTester {
## Test Results
${result.tests.map(test => `
${result.tests
.map(
(test) => `
### ${test.name}
- **Status**: ${test.success ? '✅ Pass' : '❌ Fail'}
- **Status**: ${test.success ? "✅ Pass" : "❌ Fail"}
- **HTTP Status**: ${test.status}
- **Duration**: ${test.duration}ms
${test.error ? `- **Error**: ${test.error.message}` : ''}
${test.response && typeof test.response === 'object' ? `- **Response**: \`\`\`json\n${JSON.stringify(test.response, null, 2)}\n\`\`\`` : ''}
`).join('')}
${test.error ? `- **Error**: ${test.error.message}` : ""}
${test.response && typeof test.response === "object" ? `- **Response**: \`\`\`json\n${JSON.stringify(test.response, null, 2)}\n\`\`\`` : ""}
`
)
.join("")}
## Summary
${result.success ?
'🎉 All critical tRPC endpoints are working correctly!' :
`⚠️ ${result.criticalFailures} critical endpoint(s) failed. Please review and fix the issues above.`
${
result.success
? "🎉 All critical tRPC endpoints are working correctly!"
: `⚠️ ${result.criticalFailures} critical endpoint(s) failed. Please review and fix the issues above.`
}
## Recommendations
${result.failedTests > 0 ? `
${
result.failedTests > 0
? `
### Failed Tests Analysis
${result.tests.filter(t => !t.success).map(test => `
${result.tests
.filter((t) => !t.success)
.map(
(test) => `
- **${test.name}**: ${test.error?.message || `HTTP ${test.status}`}
`).join('')}
`
)
.join("")}
### Next Steps
1. Check server logs for detailed error information
@@ -437,13 +464,15 @@ ${result.tests.filter(t => !t.success).map(test => `
3. Ensure all required dependencies are installed
4. Validate environment configuration
5. Test endpoints manually if needed
` : `
`
: `
### Optimization Opportunities
1. Monitor response times for performance optimization
2. Consider implementing caching for frequently accessed endpoints
3. Add monitoring and alerting for endpoint health
4. Implement rate limiting if not already in place
`}
`
}
---
*Generated at ${new Date().toISOString()}*
@@ -492,17 +521,19 @@ if (import.meta.url === `file://${process.argv[1]}`) {
runTests()
.then((result) => {
console.log('\n=== tRPC ENDPOINT TEST RESULTS ===');
console.log(`Overall Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== tRPC ENDPOINT TEST RESULTS ===");
console.log(`Overall Success: ${result.success ? "✅" : "❌"}`);
console.log(`Total Duration: ${result.totalDuration}ms`);
console.log(`Passed Tests: ${result.passedTests}/${result.tests.length}`);
console.log(`Failed Tests: ${result.failedTests}/${result.tests.length}`);
console.log(`Critical Failures: ${result.criticalFailures}`);
console.log('\n=== INDIVIDUAL TEST RESULTS ===');
console.log("\n=== INDIVIDUAL TEST RESULTS ===");
for (const test of result.tests) {
const status = test.success ? '✅' : '❌';
console.log(`${status} ${test.name} (HTTP ${test.status}, ${test.duration}ms)`);
const status = test.success ? "✅" : "❌";
console.log(
`${status} ${test.name} (HTTP ${test.status}, ${test.duration}ms)`
);
if (test.error) {
console.log(` Error: ${test.error.message}`);
@@ -520,7 +551,7 @@ if (import.meta.url === `file://${process.argv[1]}`) {
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('tRPC endpoint tests failed:', error);
console.error("tRPC endpoint tests failed:", error);
process.exit(1);
});
}
}
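
Stripped of logging, each endpoint test above is one bounded request: fire it, race it against a timeout, and treat any of the expected statuses (401/403 count as "reachable" for protected procedures) as a pass. A hedged sketch of that core loop, with a hypothetical endpoint path:

async function probeEndpoint(
  url: string,
  expectedStatuses: number[],
  timeoutMs = 10000
): Promise<{ success: boolean; status: number; duration: number }> {
  const started = Date.now();
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    const response = await fetch(url, { signal: controller.signal });
    return {
      success: expectedStatuses.includes(response.status),
      status: response.status,
      duration: Date.now() - started,
    };
  } finally {
    clearTimeout(timer); // avoid a stray abort after the request settles
  }
}

// Hypothetical procedure path, for illustration only:
// probeEndpoint("http://localhost:3000/api/trpc/session.list", [200, 401, 403]);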

View File

@@ -34,7 +34,10 @@ export class DatabaseValidator {
};
try {
migrationLogger.startStep("DATABASE_VALIDATION", "Running comprehensive database validation");
migrationLogger.startStep(
"DATABASE_VALIDATION",
"Running comprehensive database validation"
);
// Test database connection
await this.validateConnection(result);
@@ -62,13 +65,21 @@ export class DatabaseValidator {
if (result.success) {
migrationLogger.completeStep("DATABASE_VALIDATION");
} else {
migrationLogger.failStep("DATABASE_VALIDATION", new Error(`Validation failed with ${result.errors.length} errors`));
migrationLogger.failStep(
"DATABASE_VALIDATION",
new Error(`Validation failed with ${result.errors.length} errors`)
);
}
} catch (error) {
result.success = false;
result.errors.push(`Database validation failed: ${(error as Error).message}`);
migrationLogger.error("DATABASE_VALIDATION", "Critical validation error", error as Error);
result.errors.push(
`Database validation failed: ${(error as Error).message}`
);
migrationLogger.error(
"DATABASE_VALIDATION",
"Critical validation error",
error as Error
);
} finally {
await this.prisma.$disconnect();
}
@@ -82,34 +93,54 @@ export class DatabaseValidator {
await this.prisma.$queryRaw`SELECT 1`;
migrationLogger.info("DB_CONNECTION", "Database connection successful");
} catch (error) {
result.errors.push(`Database connection failed: ${(error as Error).message}`);
result.errors.push(
`Database connection failed: ${(error as Error).message}`
);
}
}
private async validateSchemaIntegrity(result: ValidationResult): Promise<void> {
private async validateSchemaIntegrity(
result: ValidationResult
): Promise<void> {
migrationLogger.info("SCHEMA_VALIDATION", "Validating schema integrity");
try {
// Check if all required tables exist
const requiredTables = [
'Company', 'User', 'Session', 'SessionImport', 'Message',
'SessionProcessingStatus', 'Question', 'SessionQuestion',
'AIBatchRequest', 'AIProcessingRequest', 'AIModel',
'AIModelPricing', 'CompanyAIModel', 'PlatformUser'
"Company",
"User",
"Session",
"SessionImport",
"Message",
"SessionProcessingStatus",
"Question",
"SessionQuestion",
"AIBatchRequest",
"AIProcessingRequest",
"AIModel",
"AIModelPricing",
"CompanyAIModel",
"PlatformUser",
];
for (const table of requiredTables) {
try {
await this.prisma.$queryRawUnsafe(`SELECT 1 FROM "${table}" LIMIT 1`);
} catch (error) {
result.errors.push(`Required table missing or inaccessible: ${table}`);
result.errors.push(
`Required table missing or inaccessible: ${table}`
);
}
}
// Check for required enums
const requiredEnums = [
'ProcessingStage', 'ProcessingStatus', 'AIBatchRequestStatus',
'AIRequestStatus', 'SentimentCategory', 'SessionCategory'
"ProcessingStage",
"ProcessingStatus",
"AIBatchRequestStatus",
"AIRequestStatus",
"SentimentCategory",
"SessionCategory",
];
for (const enumName of requiredEnums) {
@@ -124,9 +155,10 @@ export class DatabaseValidator {
result.errors.push(`Required enum missing: ${enumName}`);
}
}
} catch (error) {
result.errors.push(`Schema validation failed: ${(error as Error).message}`);
result.errors.push(
`Schema validation failed: ${(error as Error).message}`
);
}
}
@@ -135,7 +167,7 @@ export class DatabaseValidator {
try {
// Check for orphaned records
const orphanedSessions = await this.prisma.$queryRaw<{count: bigint}[]>`
const orphanedSessions = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count
FROM "Session" s
LEFT JOIN "Company" c ON s."companyId" = c.id
@@ -143,11 +175,15 @@ export class DatabaseValidator {
`;
if (orphanedSessions[0]?.count > 0) {
result.errors.push(`Found ${orphanedSessions[0].count} orphaned sessions`);
result.errors.push(
`Found ${orphanedSessions[0].count} orphaned sessions`
);
}
// Check for sessions without processing status
const sessionsWithoutStatus = await this.prisma.$queryRaw<{count: bigint}[]>`
const sessionsWithoutStatus = await this.prisma.$queryRaw<
{ count: bigint }[]
>`
SELECT COUNT(*) as count
FROM "Session" s
LEFT JOIN "SessionProcessingStatus" sps ON s.id = sps."sessionId"
@@ -155,11 +191,15 @@ export class DatabaseValidator {
`;
if (sessionsWithoutStatus[0]?.count > 0) {
result.warnings.push(`Found ${sessionsWithoutStatus[0].count} sessions without processing status`);
result.warnings.push(
`Found ${sessionsWithoutStatus[0].count} sessions without processing status`
);
}
// Check for inconsistent batch processing states
const inconsistentBatchStates = await this.prisma.$queryRaw<{count: bigint}[]>`
const inconsistentBatchStates = await this.prisma.$queryRaw<
{ count: bigint }[]
>`
SELECT COUNT(*) as count
FROM "AIProcessingRequest" apr
WHERE apr."batchId" IS NOT NULL
@@ -167,11 +207,14 @@ export class DatabaseValidator {
`;
if (inconsistentBatchStates[0]?.count > 0) {
result.warnings.push(`Found ${inconsistentBatchStates[0].count} requests with inconsistent batch states`);
result.warnings.push(
`Found ${inconsistentBatchStates[0].count} requests with inconsistent batch states`
);
}
} catch (error) {
result.errors.push(`Data integrity validation failed: ${(error as Error).message}`);
result.errors.push(
`Data integrity validation failed: ${(error as Error).message}`
);
}
}
@@ -181,71 +224,91 @@ export class DatabaseValidator {
try {
// Check for missing critical indexes
const criticalIndexes = [
{ table: 'Session', columns: ['companyId', 'startTime'] },
{ table: 'SessionProcessingStatus', columns: ['stage', 'status'] },
{ table: 'AIProcessingRequest', columns: ['processingStatus'] },
{ table: 'AIBatchRequest', columns: ['companyId', 'status'] },
{ table: "Session", columns: ["companyId", "startTime"] },
{ table: "SessionProcessingStatus", columns: ["stage", "status"] },
{ table: "AIProcessingRequest", columns: ["processingStatus"] },
{ table: "AIBatchRequest", columns: ["companyId", "status"] },
];
for (const indexInfo of criticalIndexes) {
const indexExists = await this.prisma.$queryRawUnsafe(`
const indexExists = (await this.prisma.$queryRawUnsafe(`
SELECT COUNT(*) as count
FROM pg_indexes
WHERE tablename = '${indexInfo.table}'
AND indexdef LIKE '%${indexInfo.columns.join('%')}%'
`) as {count: string}[];
AND indexdef LIKE '%${indexInfo.columns.join("%")}%'
`)) as { count: string }[];
if (parseInt(indexExists[0]?.count || '0') === 0) {
result.warnings.push(`Missing recommended index on ${indexInfo.table}(${indexInfo.columns.join(', ')})`);
if (parseInt(indexExists[0]?.count || "0") === 0) {
result.warnings.push(
`Missing recommended index on ${indexInfo.table}(${indexInfo.columns.join(", ")})`
);
}
}
} catch (error) {
result.warnings.push(`Index validation failed: ${(error as Error).message}`);
result.warnings.push(
`Index validation failed: ${(error as Error).message}`
);
}
}
private async validateBatchProcessingReadiness(result: ValidationResult): Promise<void> {
migrationLogger.info("BATCH_READINESS", "Validating batch processing readiness");
private async validateBatchProcessingReadiness(
result: ValidationResult
): Promise<void> {
migrationLogger.info(
"BATCH_READINESS",
"Validating batch processing readiness"
);
try {
// Check if AIBatchRequest table is properly configured
const batchTableCheck = await this.prisma.$queryRaw<{count: bigint}[]>`
const batchTableCheck = await this.prisma.$queryRaw<{ count: bigint }[]>`
SELECT COUNT(*) as count FROM "AIBatchRequest"
`;
// Check if AIProcessingRequest has batch-related fields
const batchFieldsCheck = await this.prisma.$queryRawUnsafe(`
const batchFieldsCheck = (await this.prisma.$queryRawUnsafe(`
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'AIProcessingRequest'
AND column_name IN ('processingStatus', 'batchId')
`) as {column_name: string}[];
`)) as { column_name: string }[];
if (batchFieldsCheck.length < 2) {
result.errors.push("AIProcessingRequest table missing batch processing fields");
result.errors.push(
"AIProcessingRequest table missing batch processing fields"
);
}
// Check if batch status enum values are correct
const batchStatusValues = await this.prisma.$queryRawUnsafe(`
const batchStatusValues = (await this.prisma.$queryRawUnsafe(`
SELECT unnest(enum_range(NULL::AIBatchRequestStatus)) as value
`) as {value: string}[];
`)) as { value: string }[];
const requiredBatchStatuses = [
'PENDING', 'UPLOADING', 'VALIDATING', 'IN_PROGRESS',
'FINALIZING', 'COMPLETED', 'PROCESSED', 'FAILED', 'CANCELLED'
"PENDING",
"UPLOADING",
"VALIDATING",
"IN_PROGRESS",
"FINALIZING",
"COMPLETED",
"PROCESSED",
"FAILED",
"CANCELLED",
];
const missingStatuses = requiredBatchStatuses.filter(
status => !batchStatusValues.some(v => v.value === status)
(status) => !batchStatusValues.some((v) => v.value === status)
);
if (missingStatuses.length > 0) {
result.errors.push(`Missing batch status values: ${missingStatuses.join(', ')}`);
result.errors.push(
`Missing batch status values: ${missingStatuses.join(", ")}`
);
}
} catch (error) {
result.errors.push(`Batch processing readiness validation failed: ${(error as Error).message}`);
result.errors.push(
`Batch processing readiness validation failed: ${(error as Error).message}`
);
}
}
@@ -265,14 +328,16 @@ export class DatabaseValidator {
try {
await test();
} catch (error) {
result.warnings.push(`Prisma model access issue: ${(error as Error).message}`);
result.warnings.push(
`Prisma model access issue: ${(error as Error).message}`
);
}
}
// Test complex queries that tRPC will use
try {
await this.prisma.session.findMany({
where: { companyId: 'test' },
where: { companyId: "test" },
include: {
messages: true,
processingStatus: true,
@@ -281,13 +346,16 @@ export class DatabaseValidator {
});
} catch (error) {
// This is expected to fail with the test companyId, but should not error on structure
if (!(error as Error).message.includes('test')) {
result.warnings.push(`Complex query structure issue: ${(error as Error).message}`);
if (!(error as Error).message.includes("test")) {
result.warnings.push(
`Complex query structure issue: ${(error as Error).message}`
);
}
}
} catch (error) {
result.warnings.push(`tRPC readiness validation failed: ${(error as Error).message}`);
result.warnings.push(
`tRPC readiness validation failed: ${(error as Error).message}`
);
}
}
@@ -301,7 +369,8 @@ export class DatabaseValidator {
const sessionsCount = await this.prisma.session.count();
const messagesCount = await this.prisma.message.count();
const batchRequestsCount = await this.prisma.aIBatchRequest.count();
const processingRequestsCount = await this.prisma.aIProcessingRequest.count();
const processingRequestsCount =
await this.prisma.aIProcessingRequest.count();
result.metrics = {
companies: companiesCount,
@@ -313,27 +382,31 @@ export class DatabaseValidator {
};
// Check processing status distribution
const processingStatusCounts = await this.prisma.sessionProcessingStatus.groupBy({
by: ['status'],
_count: { status: true },
});
const processingStatusCounts =
await this.prisma.sessionProcessingStatus.groupBy({
by: ["status"],
_count: { status: true },
});
for (const statusCount of processingStatusCounts) {
result.metrics[`processing_${statusCount.status.toLowerCase()}`] = statusCount._count.status;
result.metrics[`processing_${statusCount.status.toLowerCase()}`] =
statusCount._count.status;
}
// Check batch request status distribution
const batchStatusCounts = await this.prisma.aIBatchRequest.groupBy({
by: ['status'],
by: ["status"],
_count: { status: true },
});
for (const statusCount of batchStatusCounts) {
result.metrics[`batch_${statusCount.status.toLowerCase()}`] = statusCount._count.status;
result.metrics[`batch_${statusCount.status.toLowerCase()}`] =
statusCount._count.status;
}
} catch (error) {
result.warnings.push(`Metrics collection failed: ${(error as Error).message}`);
result.warnings.push(
`Metrics collection failed: ${(error as Error).message}`
);
}
}
}
@@ -342,22 +415,23 @@ export class DatabaseValidator {
if (import.meta.url === `file://${process.argv[1]}`) {
const validator = new DatabaseValidator();
validator.validateDatabase()
validator
.validateDatabase()
.then((result) => {
console.log('\n=== DATABASE VALIDATION RESULTS ===');
console.log(`Success: ${result.success ? '✅' : '❌'}`);
console.log("\n=== DATABASE VALIDATION RESULTS ===");
console.log(`Success: ${result.success ? "✅" : "❌"}`);
if (result.errors.length > 0) {
console.log('\n❌ ERRORS:');
result.errors.forEach(error => console.log(` - ${error}`));
console.log("\n❌ ERRORS:");
result.errors.forEach((error) => console.log(` - ${error}`));
}
if (result.warnings.length > 0) {
console.log('\n⚠️ WARNINGS:');
result.warnings.forEach(warning => console.log(` - ${warning}`));
console.log("\n⚠ WARNINGS:");
result.warnings.forEach((warning) => console.log(` - ${warning}`));
}
console.log('\n📊 METRICS:');
console.log("\n📊 METRICS:");
Object.entries(result.metrics).forEach(([key, value]) => {
console.log(` ${key}: ${value}`);
});
@@ -365,7 +439,7 @@ if (import.meta.url === `file://${process.argv[1]}`) {
process.exit(result.success ? 0 : 1);
})
.catch((error) => {
console.error('Validation failed:', error);
console.error("Validation failed:", error);
process.exit(1);
});
}
}
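
The integrity checks above lean on raw COUNT(*) queries, and PostgreSQL counts come back from Prisma as bigint, which is why the results are typed { count: bigint }[]. A minimal sketch of one such probe in isolation; the WHERE clause is the standard orphan test, assumed here rather than copied from the source:

import { PrismaClient } from "@prisma/client";

async function countOrphanedSessions(prisma: PrismaClient): Promise<number> {
  const rows = await prisma.$queryRaw<{ count: bigint }[]>`
    SELECT COUNT(*) as count
    FROM "Session" s
    LEFT JOIN "Company" c ON s."companyId" = c.id
    WHERE c.id IS NULL
  `;
  // COUNT(*) arrives as bigint; convert before logging or arithmetic.
  return Number(rows[0]?.count ?? 0n);
}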

198 scripts/test-csp.ts Normal file
View File

@@ -0,0 +1,198 @@
#!/usr/bin/env tsx
import { buildCSP, validateCSP, generateNonce } from "../lib/csp";
interface TestCase {
name: string;
config: Parameters<typeof buildCSP>[0];
shouldPass: boolean;
expectedWarnings?: number;
expectedErrors?: number;
}
const testCases: TestCase[] = [
{
name: "Development CSP",
config: { isDevelopment: true },
shouldPass: true,
expectedWarnings: 2, // unsafe-eval and unsafe-inline warnings
},
{
name: "Production CSP with nonce",
config: {
nonce: generateNonce(),
isDevelopment: false,
reportUri: "/api/csp-report",
},
shouldPass: true,
expectedWarnings: 0,
},
{
name: "Production CSP without nonce (fallback)",
config: {
isDevelopment: false,
reportUri: "/api/csp-report",
},
shouldPass: true,
expectedWarnings: 1, // unsafe-inline warning for styles
},
{
name: "Enforce mode enabled",
config: {
nonce: generateNonce(),
isDevelopment: false,
enforceMode: true,
reportUri: "/api/csp-report",
},
shouldPass: true,
expectedWarnings: 0,
},
];
function runCSPTests() {
console.log("🔒 Running CSP Tests\n");
let passed = 0;
let failed = 0;
for (const testCase of testCases) {
console.log(`Testing: ${testCase.name}`);
try {
// Build CSP
const csp = buildCSP(testCase.config);
console.log(` CSP: ${csp.substring(0, 100)}...`);
// Validate CSP
const validation = validateCSP(csp);
console.log(` Valid: ${validation.isValid}`);
console.log(` Warnings: ${validation.warnings.length}`);
console.log(` Errors: ${validation.errors.length}`);
if (validation.warnings.length > 0) {
console.log(` Warning details: ${validation.warnings.join(", ")}`);
}
if (validation.errors.length > 0) {
console.log(` Error details: ${validation.errors.join(", ")}`);
}
// Check expectations
const passedValidation = validation.isValid === testCase.shouldPass;
const warningsMatch =
testCase.expectedWarnings === undefined ||
validation.warnings.length === testCase.expectedWarnings;
const errorsMatch =
testCase.expectedErrors === undefined ||
validation.errors.length === testCase.expectedErrors;
if (passedValidation && warningsMatch && errorsMatch) {
console.log(" ✅ PASSED\n");
passed++;
} else {
console.log(" ❌ FAILED");
if (!passedValidation) {
console.log(
` Expected valid: ${testCase.shouldPass}, got: ${validation.isValid}`
);
}
if (!warningsMatch) {
console.log(
` Expected warnings: ${testCase.expectedWarnings}, got: ${validation.warnings.length}`
);
}
if (!errorsMatch) {
console.log(
` Expected errors: ${testCase.expectedErrors}, got: ${validation.errors.length}`
);
}
console.log("");
failed++;
}
} catch (error) {
console.log(` ❌ FAILED: ${error}`);
failed++;
}
}
console.log(`\n📊 Results: ${passed} passed, ${failed} failed`);
if (failed > 0) {
process.exit(1);
}
}
function testCSPDirectives() {
console.log("\n🔍 Testing CSP Directives\n");
const nonce = generateNonce();
const productionCSP = buildCSP({
nonce,
isDevelopment: false,
reportUri: "/api/csp-report",
});
console.log("Production CSP:");
console.log(productionCSP);
console.log("");
// Check for required directives
const requiredDirectives = [
"default-src 'self'",
`script-src 'self' 'nonce-${nonce}' 'strict-dynamic'`,
`style-src 'self' 'nonce-${nonce}'`,
"object-src 'none'",
"base-uri 'self'",
"form-action 'self'",
"frame-ancestors 'none'",
"upgrade-insecure-requests",
"report-uri /api/csp-report",
];
console.log("Required directives check:");
for (const directive of requiredDirectives) {
const present = productionCSP.includes(directive);
console.log(` ${present ? "✅" : "❌"} ${directive}`);
}
console.log("\nDevelopment CSP:");
const devCSP = buildCSP({ isDevelopment: true });
console.log(devCSP);
}
function testNonceGeneration() {
console.log("\n🎲 Testing Nonce Generation\n");
const nonces = new Set();
const iterations = 1000;
for (let i = 0; i < iterations; i++) {
const nonce = generateNonce();
// Check format
if (!/^[A-Za-z0-9+/]+=*$/.test(nonce)) {
console.log(`❌ Invalid nonce format: ${nonce}`);
return;
}
// Check uniqueness
if (nonces.has(nonce)) {
console.log(`❌ Duplicate nonce detected: ${nonce}`);
return;
}
nonces.add(nonce);
}
console.log(`✅ Generated ${iterations} unique nonces`);
console.log(`✅ All nonces have valid base64 format`);
console.log(`Example nonce: ${Array.from(nonces)[0]}`);
}
// Run all tests
if (import.meta.url === `file://${process.argv[1]}`) {
runCSPTests();
testCSPDirectives();
testNonceGeneration();
}
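
The nonce tests above, together with the length check in the enhanced validator below, pin down the contract for generateNonce: base64 format, unique across a thousand draws, and 16 decoded bytes. One plausible implementation that satisfies that contract; the actual lib/csp.ts may differ:

import { randomBytes } from "node:crypto";

// 16 random bytes = 128 bits of entropy, base64-encoded for the CSP header.
function generateNonceSketch(): string {
  return randomBytes(16).toString("base64");
}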

View File

@@ -100,7 +100,9 @@ async function testSecurityHeaders(url: string): Promise<void> {
method: "HEAD", // Use HEAD to avoid downloading the full response body
});
console.log(`📊 Response Status: ${response.status} ${response.statusText}\n`);
console.log(
`📊 Response Status: ${response.status} ${response.statusText}\n`
);
let criticalMissing = 0;
let warningCount = 0;
@@ -151,7 +153,9 @@ async function testSecurityHeaders(url: string): Promise<void> {
}
if (cspIssues > 0) {
console.log(` ⚠️ ${cspIssues} CSP directive(s) missing or incorrect\n`);
console.log(
` ⚠️ ${cspIssues} CSP directive(s) missing or incorrect\n`
);
warningCount += cspIssues;
} else {
console.log(` ✅ All CSP directives present\n`);
@@ -187,36 +191,49 @@ async function testSecurityHeaders(url: string): Promise<void> {
if (isHttps && !hsts) {
console.log("⚠️ WARNING: HTTPS site missing HSTS header");
console.log(" Consider adding Strict-Transport-Security for production\n");
console.log(
" Consider adding Strict-Transport-Security for production\n"
);
warningCount++;
} else if (hsts && !isHttps) {
console.log(" INFO: HSTS header present on HTTP site (will be ignored by browsers)\n");
console.log(
" INFO: HSTS header present on HTTP site (will be ignored by browsers)\n"
);
}
// Summary
console.log("=" .repeat(60));
console.log("=".repeat(60));
console.log("📋 SECURITY HEADERS SUMMARY");
console.log("=" .repeat(60));
console.log("=".repeat(60));
if (criticalMissing === 0 && warningCount === 0) {
console.log("🎉 EXCELLENT: All security headers are properly configured!");
console.log(
"🎉 EXCELLENT: All security headers are properly configured!"
);
} else if (criticalMissing === 0) {
console.log(`✅ GOOD: No critical issues found`);
console.log(`⚠️ ${warningCount} warning(s) - consider addressing these for optimal security`);
console.log(
`⚠️ ${warningCount} warning(s) - consider addressing these for optimal security`
);
} else {
console.log(`❌ ISSUES FOUND:`);
console.log(` Critical: ${criticalMissing}`);
console.log(` Warnings: ${warningCount}`);
console.log(`\n🔧 Please address critical issues before deploying to production`);
console.log(
`\n🔧 Please address critical issues before deploying to production`
);
}
// Additional recommendations
console.log("\n💡 ADDITIONAL RECOMMENDATIONS:");
console.log("• Regularly test headers with online tools like securityheaders.com");
console.log(
"• Regularly test headers with online tools like securityheaders.com"
);
console.log("• Monitor CSP violations in production to fine-tune policies");
console.log("• Consider implementing HSTS preloading for production domains");
console.log(
"• Consider implementing HSTS preloading for production domains"
);
console.log("• Review and update security headers based on new threats");
} catch (error) {
console.error(`❌ Error testing headers: ${error}`);
process.exit(1);
@@ -228,7 +245,7 @@ async function main() {
const url = process.argv[2] || "http://localhost:3000";
console.log("🛡️ Security Headers Testing Tool");
console.log("=" .repeat(60));
console.log("=".repeat(60));
await testSecurityHeaders(url);
}
@@ -238,4 +255,4 @@ if (require.main === module) {
console.error("Script failed:", error);
process.exit(1);
});
}
}
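
For a quick local check, the essence of the script above is a single HEAD request followed by a presence test over the critical headers. A condensed sketch; the header list here is a representative subset, not the script's full set:

const CRITICAL_HEADERS = [
  "content-security-policy",
  "x-content-type-options",
  "x-frame-options",
  "referrer-policy",
];

async function findMissingHeaders(url: string): Promise<string[]> {
  // HEAD avoids downloading the body; Headers matches names case-insensitively.
  const response = await fetch(url, { method: "HEAD" });
  return CRITICAL_HEADERS.filter((name) => !response.headers.has(name));
}

// findMissingHeaders("http://localhost:3000").then((missing) =>
//   console.log(missing.length ? `Missing: ${missing.join(", ")}` : "All present")
// );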

View File

@@ -0,0 +1,479 @@
#!/usr/bin/env tsx
import {
buildCSP,
validateCSP,
generateNonce,
detectCSPBypass,
testCSPImplementation,
} from "../lib/csp";
interface CSPTestResult {
test: string;
passed: boolean;
details?: string;
recommendations?: string[];
}
class CSPValidator {
private results: CSPTestResult[] = [];
private addResult(
test: string,
passed: boolean,
details?: string,
recommendations?: string[]
) {
this.results.push({ test, passed, details, recommendations });
}
async validateNonceGeneration() {
console.log("🎲 Testing Nonce Generation...");
// Test uniqueness
const nonces = new Set();
const iterations = 1000;
let duplicateFound = false;
for (let i = 0; i < iterations; i++) {
const nonce = generateNonce();
if (nonces.has(nonce)) {
duplicateFound = true;
break;
}
nonces.add(nonce);
}
this.addResult(
"Nonce Uniqueness",
!duplicateFound,
duplicateFound
? "Duplicate nonce detected"
: `${iterations} unique nonces generated`,
duplicateFound
? ["Check entropy source", "Verify crypto.randomBytes"]
: undefined
);
// Test format
const testNonce = generateNonce();
const validFormat = /^[A-Za-z0-9+/]+=*$/.test(testNonce);
this.addResult(
"Nonce Format",
validFormat,
`Generated nonce: ${testNonce}`,
!validFormat ? ["Ensure proper base64 encoding"] : undefined
);
// Test length
const decodedLength = Buffer.from(testNonce, "base64").length;
const correctLength = decodedLength === 16;
this.addResult(
"Nonce Length",
correctLength,
`Decoded length: ${decodedLength} bytes`,
!correctLength ? ["Use 16 bytes (128 bits) for security"] : undefined
);
}
async validateProductionCSP() {
console.log("🛡️ Testing Production CSP...");
const nonce = generateNonce();
const productionCSP = buildCSP({
nonce,
isDevelopment: false,
reportUri: "/api/csp-report",
enforceMode: true,
});
console.log("Production CSP:", productionCSP);
// Validate overall structure
const validation = validateCSP(productionCSP);
this.addResult(
"CSP Validation",
validation.isValid,
`Errors: ${validation.errors.length}, Warnings: ${validation.warnings.length}`,
validation.errors.length > 0 ? validation.errors : undefined
);
// Check for secure directives
const securityTests = [
{
name: "No unsafe-inline in scripts",
test:
!productionCSP.includes("script-src") ||
!productionCSP.match(/script-src[^;]*'unsafe-inline'/),
critical: true,
},
{
name: "No unsafe-eval in scripts",
test: !productionCSP.includes("'unsafe-eval'"),
critical: true,
},
{
name: "Nonce-based script execution",
test: productionCSP.includes(`'nonce-${nonce}'`),
critical: true,
},
{
name: "Strict dynamic enabled",
test: productionCSP.includes("'strict-dynamic'"),
critical: false,
},
{
name: "Object sources blocked",
test: productionCSP.includes("object-src 'none'"),
critical: true,
},
{
name: "Base URI restricted",
test: productionCSP.includes("base-uri 'self'"),
critical: true,
},
{
name: "Frame ancestors blocked",
test: productionCSP.includes("frame-ancestors 'none'"),
critical: true,
},
{
name: "HTTPS upgrade enabled",
test: productionCSP.includes("upgrade-insecure-requests"),
critical: false,
},
{
name: "Report URI configured",
test: productionCSP.includes("report-uri /api/csp-report"),
critical: false,
},
];
for (const secTest of securityTests) {
this.addResult(
secTest.name,
secTest.test,
undefined,
!secTest.test && secTest.critical
? ["This is a critical security requirement"]
: undefined
);
}
}
async validateDevelopmentCSP() {
console.log("🔧 Testing Development CSP...");
const devCSP = buildCSP({
isDevelopment: true,
reportUri: "/api/csp-report",
});
console.log("Development CSP:", devCSP);
// Development should be more permissive but still secure
const devTests = [
{
name: "Allows unsafe-eval for dev tools",
test: devCSP.includes("'unsafe-eval'"),
},
{
name: "Allows unsafe-inline for hot reload",
test: devCSP.includes("'unsafe-inline'"),
},
{
name: "Allows WebSocket connections",
test: devCSP.includes("wss:") || devCSP.includes("ws:"),
},
{
name: "Still blocks objects",
test: devCSP.includes("object-src 'none'"),
},
{
name: "Still restricts base URI",
test: devCSP.includes("base-uri 'self'"),
},
];
for (const devTest of devTests) {
this.addResult(devTest.name, devTest.test);
}
}
async validateBypassDetection() {
console.log("🕵️ Testing Bypass Detection...");
const bypassTests = [
{
name: "Detects javascript: protocol",
content: "window.location.href = 'javascript:alert(1)'",
shouldDetect: true,
},
{
name: "Detects data: HTML injection",
content: "iframe.src = 'data:text/html,<script>alert(1)</script>'",
shouldDetect: true,
},
{
name: "Detects eval injection",
content: "eval('malicious code')",
shouldDetect: true,
},
{
name: "Detects Function constructor",
content: "new Function('alert(1)')()",
shouldDetect: true,
},
{
name: "Detects setTimeout string",
content: "setTimeout('alert(1)', 1000)",
shouldDetect: true,
},
{
name: "Ignores legitimate content",
content: "This is normal text with no dangerous patterns",
shouldDetect: false,
},
{
name: "Ignores safe JavaScript",
content: "function safeFunction() { return 'hello'; }",
shouldDetect: false,
},
];
for (const bypassTest of bypassTests) {
const detection = detectCSPBypass(bypassTest.content);
const passed = detection.isDetected === bypassTest.shouldDetect;
this.addResult(
bypassTest.name,
passed,
`Detected: ${detection.isDetected}, Risk: ${detection.riskLevel}`,
!passed ? ["Review bypass detection patterns"] : undefined
);
}
}
async validateContentSources() {
console.log("🌐 Testing Content Source Restrictions...");
const nonce = generateNonce();
const csp = buildCSP({
nonce,
isDevelopment: false,
reportUri: "/api/csp-report",
});
// Check specific content source restrictions
const sourceTests = [
{
name: "Script sources are restrictive",
test: () => {
const scriptMatch = csp.match(/script-src ([^;]+)/);
if (!scriptMatch) return false;
const sources = scriptMatch[1];
return (
sources.includes("'self'") &&
sources.includes(`'nonce-${nonce}'`) &&
!sources.includes("'unsafe-inline'") &&
!sources.includes("*")
);
},
},
{
name: "Style sources use nonce",
test: () => {
const styleMatch = csp.match(/style-src ([^;]+)/);
if (!styleMatch) return false;
const sources = styleMatch[1];
return (
sources.includes("'self'") && sources.includes(`'nonce-${nonce}'`)
);
},
},
{
name: "Image sources are limited",
test: () => {
const imgMatch = csp.match(/img-src ([^;]+)/);
if (!imgMatch) return false;
const sources = imgMatch[1];
return (
sources.includes("'self'") &&
sources.includes("data:") &&
!sources.includes("*")
);
},
},
{
name: "Connect sources are specific",
test: () => {
const connectMatch = csp.match(/connect-src ([^;]+)/);
if (!connectMatch) return false;
const sources = connectMatch[1];
return (
sources.includes("'self'") &&
sources.includes("https://api.openai.com") &&
!sources.includes("ws:") &&
!sources.includes("wss:")
);
},
},
{
name: "Font sources are restricted",
test: () => {
const fontMatch = csp.match(/font-src ([^;]+)/);
if (!fontMatch) return false;
const sources = fontMatch[1];
return (
sources.includes("'self'") &&
sources.includes("data:") &&
!sources.includes("*")
);
},
},
];
for (const sourceTest of sourceTests) {
this.addResult(
sourceTest.name,
sourceTest.test(),
undefined,
!sourceTest.test()
? ["Review and tighten content source restrictions"]
: undefined
);
}
}
async validateCompatibility() {
console.log("🔄 Testing Framework Compatibility...");
// Test that CSP works with Next.js requirements
const compatibilityTests = [
{
name: "Next.js development compatibility",
test: () => {
const devCSP = buildCSP({ isDevelopment: true });
return devCSP.includes("'unsafe-eval'"); // Required for Next.js dev
},
},
{
name: "TailwindCSS compatibility",
test: () => {
const csp = buildCSP({ isDevelopment: false });
// Should either have nonce or unsafe-inline for styles
return (
csp.includes("'nonce-") ||
csp.includes("style-src 'self' 'unsafe-inline'")
);
},
},
{
name: "JSON-LD support",
test: () => {
const nonce = generateNonce();
const csp = buildCSP({ nonce, isDevelopment: false });
// Should allow nonce-based inline scripts
return csp.includes(`'nonce-${nonce}'`);
},
},
];
for (const compatTest of compatibilityTests) {
this.addResult(
compatTest.name,
compatTest.test(),
undefined,
!compatTest.test() ? ["Ensure framework compatibility"] : undefined
);
}
}
generateReport() {
console.log("\n📊 CSP Validation Report");
console.log("=".repeat(50));
const passed = this.results.filter((r) => r.passed).length;
const failed = this.results.filter((r) => r.passed === false).length;
const critical = this.results.filter(
(r) =>
!r.passed && r.recommendations?.some((rec) => rec.includes("critical"))
).length;
console.log(`\n📈 Summary: ${passed} passed, ${failed} failed`);
if (critical > 0) {
console.log(`⚠️ Critical issues: ${critical}`);
}
console.log("\n📋 Detailed Results:");
for (const result of this.results) {
const status = result.passed ? "✅" : "❌";
console.log(`${status} ${result.test}`);
if (result.details) {
console.log(` ${result.details}`);
}
if (result.recommendations) {
for (const rec of result.recommendations) {
console.log(` 💡 ${rec}`);
}
}
}
// Security score
const securityScore = Math.round((passed / this.results.length) * 100);
console.log(`\n🛡️ Security Score: ${securityScore}%`);
if (securityScore >= 90) {
console.log("🎉 Excellent CSP implementation!");
} else if (securityScore >= 80) {
console.log("🔧 Good CSP implementation with room for improvement");
} else if (securityScore >= 70) {
console.log("⚠️ CSP implementation needs attention");
} else {
console.log("🚨 CSP implementation has serious security issues");
}
return {
passed,
failed,
critical,
securityScore,
success: failed === 0 && critical === 0,
};
}
async run() {
console.log("🔒 Enhanced CSP Implementation Validation");
console.log("=".repeat(50));
await this.validateNonceGeneration();
await this.validateProductionCSP();
await this.validateDevelopmentCSP();
await this.validateBypassDetection();
await this.validateContentSources();
await this.validateCompatibility();
return this.generateReport();
}
}
// Run validation if this script is called directly
if (import.meta.url === `file://${process.argv[1]}`) {
const validator = new CSPValidator();
validator
.run()
.then((report) => {
if (!report.success) {
process.exit(1);
}
})
.catch((error) => {
console.error("❌ Validation failed:", error);
process.exit(1);
});
}
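
The bypass tests above imply a contract for detectCSPBypass: flag javascript: URLs, data:text/html payloads, eval, the Function constructor, and string-argument timers, while passing ordinary text and safe function definitions, and report a risk level alongside the boolean. A hedged sketch of a pattern-based detector consistent with that behavior; the real implementation in lib/csp.ts may use different patterns and scoring:

interface BypassDetection {
  isDetected: boolean;
  riskLevel: "none" | "medium" | "high";
}

const BYPASS_PATTERNS: { pattern: RegExp; risk: "medium" | "high" }[] = [
  { pattern: /javascript:/i, risk: "high" },          // javascript: protocol
  { pattern: /data:text\/html/i, risk: "high" },      // HTML injection via data: URI
  { pattern: /\beval\s*\(/, risk: "high" },           // direct eval
  { pattern: /new\s+Function\s*\(/, risk: "high" },   // Function constructor
  { pattern: /set(?:Timeout|Interval)\s*\(\s*['"]/, risk: "medium" }, // string-argument timers
];

function detectCSPBypassSketch(content: string): BypassDetection {
  const hits = BYPASS_PATTERNS.filter(({ pattern }) => pattern.test(content));
  if (hits.length === 0) return { isDetected: false, riskLevel: "none" };
  return {
    isDetected: true,
    riskLevel: hits.some((h) => h.risk === "high") ? "high" : "medium",
  };
}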