Yaltopia-Tickets-Backend/scripts/migrate-provider.ts
#!/usr/bin/env ts-node
/**
* Database Provider Migration Script
*
* This script helps migrate data from one database provider to another
* with minimal downtime and minimal risk of data loss.
*
* Usage:
* npm run migrate:provider -- --from supabase --to prisma
* npm run migrate:provider -- --from prisma --to mongodb
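*
* Options (mirrors the CLI help printed by main() below):
*   --from <provider>       Source database provider
*   --to <provider>         Target database provider
*   --dry-run <boolean>     Run validation only (default: false)
*   --backup <boolean>      Create backup before migration (default: true)
*   --batch-size <number>   Batch size for data migration (default: 1000)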
*/
import { NestFactory } from '@nestjs/core';
import { Logger } from '@nestjs/common';
import { AppModule } from '../src/app.module';
import { UniversalDatabaseAdapter } from '../src/shared/adapters/universal-database.adapter';
import * as fs from 'fs';
import * as path from 'path';
interface MigrationOptions {
from: string;
to: string;
dryRun?: boolean;
backup?: boolean;
batchSize?: number;
}
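// Illustrative example of the parsed options for
// `npm run migrate:provider -- --from supabase --to prisma --dry-run true`
// (a sketch for reference; defaults taken from main() below):
//
// const exampleOptions: MigrationOptions = {
//   from: 'supabase',
//   to: 'prisma',
//   dryRun: true,
//   backup: true,
//   batchSize: 1000,
// };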
class ProviderMigrationService {
private readonly logger = new Logger(ProviderMigrationService.name);
private adapter: UniversalDatabaseAdapter;
constructor(adapter: UniversalDatabaseAdapter) {
this.adapter = adapter;
}
async migrateProvider(options: MigrationOptions): Promise<void> {
this.logger.log(`🚀 Starting migration from ${options.from} to ${options.to}`);
try {
// Step 1: Validate source and target
await this.validateProviders(options.from, options.to);
// Step 2: Create backup if requested
if (options.backup) {
await this.createBackup(options.from);
}
// Step 3: Export data from source
const exportedData = await this.exportData(options.from, options.batchSize);
// Step 4: Prepare target database
await this.prepareTarget(options.to);
// Step 5: Import data to target (dry run check)
if (options.dryRun) {
this.logger.log('🔍 Dry run mode - validating data compatibility...');
await this.validateDataCompatibility(exportedData, options.to);
this.logger.log('✅ Dry run completed successfully');
return;
}
// Step 6: Import data to target
await this.importData(exportedData, options.to, options.batchSize);
// Step 7: Verify migration
await this.verifyMigration(options.from, options.to);
// Step 8: Update configuration
await this.updateConfiguration(options.to);
this.logger.log('✅ Migration completed successfully!');
this.logger.log(`📋 Next steps:`);
this.logger.log(` 1. Update your .env file with new provider settings`);
this.logger.log(` 2. Restart your application`);
this.logger.log(` 3. Run health checks to verify everything is working`);
} catch (error) {
this.logger.error(`❌ Migration failed: ${error.message}`);
throw error;
}
}
private async validateProviders(from: string, to: string): Promise<void> {
const supportedProviders = ['supabase', 'prisma', 'mongodb', 'dynamodb'];
if (!supportedProviders.includes(from)) {
throw new Error(`Unsupported source provider: ${from}`);
}
if (!supportedProviders.includes(to)) {
throw new Error(`Unsupported target provider: ${to}`);
}
if (from === to) {
throw new Error('Source and target providers cannot be the same');
}
this.logger.log(`✅ Providers validated: ${from} → ${to}`);
}
private async createBackup(provider: string): Promise<void> {
this.logger.log(`💾 Creating backup for ${provider}...`);
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const backupDir = path.join(process.cwd(), 'backups', timestamp);
if (!fs.existsSync(backupDir)) {
fs.mkdirSync(backupDir, { recursive: true });
}
// Export all data
const data = await this.exportData(provider);
// Save to backup file
const backupFile = path.join(backupDir, `${provider}-backup.json`);
fs.writeFileSync(backupFile, JSON.stringify(data, null, 2));
this.logger.log(`✅ Backup created: ${backupFile}`);
}
private async exportData(provider: string, batchSize = 1000): Promise<any> {
this.logger.log(`📤 Exporting data from ${provider}...`);
// Temporarily set the provider
process.env.DATABASE_PROVIDER = provider;
const userRepo = this.adapter.getUserRepository();
// Export users (findAll() currently loads all records; batchSize is reserved for a paginated export)
const users = await userRepo.findAll();
// Export other entities...
// const receipts = await receiptRepo.findAll();
// const verifications = await verificationRepo.findAll();
const exportedData = {
metadata: {
exportedAt: new Date().toISOString(),
sourceProvider: provider,
recordCounts: {
users: users.length,
// receipts: receipts.length,
// verifications: verifications.length,
},
},
data: {
users,
// receipts,
// verifications,
},
};
this.logger.log(`✅ Data exported: ${users.length} users`);
return exportedData;
}
private async prepareTarget(provider: string): Promise<void> {
this.logger.log(`🔧 Preparing target database: ${provider}...`);
// Temporarily set the provider
process.env.DATABASE_PROVIDER = provider;
// Run migrations if needed
await this.adapter.migrate();
this.logger.log(`✅ Target database prepared`);
}
private async validateDataCompatibility(data: any, provider: string): Promise<void> {
this.logger.log(`🔍 Validating data compatibility with ${provider}...`);
// Check for provider-specific constraints
switch (provider) {
case 'mongodb':
// MongoDB doesn't enforce foreign keys; check for relational references
this.validateMongoDBCompatibility(data);
break;
case 'dynamodb':
// DynamoDB has different data modeling requirements
this.validateDynamoDBCompatibility(data);
break;
case 'prisma':
// Prisma has strict schema requirements
this.validatePrismaCompatibility(data);
break;
}
this.logger.log(`✅ Data compatibility validated`);
}
private validateMongoDBCompatibility(data: any): void {
// Check for relational data that needs to be embedded or referenced
const { users } = data.data;
users.forEach((user: any) => {
if (user.ownerId) {
// This relationship will need to be handled differently in MongoDB
this.logger.warn(`User ${user.id} has owner relationship that may need restructuring`);
}
});
}
private validateDynamoDBCompatibility(data: any): void {
// Check for complex queries that might not work well with DynamoDB
this.logger.warn('DynamoDB migration requires careful consideration of access patterns');
}
private validatePrismaCompatibility(data: any): void {
// Validate against Prisma schema constraints
const { users } = data.data;
users.forEach((user: any) => {
if (!user.id || !user.role) {
throw new Error(`User missing required fields: ${JSON.stringify(user)}`);
}
});
}
private async importData(data: any, provider: string, batchSize = 1000): Promise<void> {
this.logger.log(`📥 Importing data to ${provider}...`);
// Temporarily set the provider
process.env.DATABASE_PROVIDER = provider;
const userRepo = this.adapter.getUserRepository();
const { users } = data.data;
// Import users in batches
for (let i = 0; i < users.length; i += batchSize) {
const batch = users.slice(i, i + batchSize);
for (const user of batch) {
try {
await userRepo.create(user);
} catch (error) {
this.logger.error(`Failed to import user ${user.id}: ${error.message}`);
// Continue with next user
}
}
this.logger.log(`Imported batch ${Math.floor(i / batchSize) + 1}/${Math.ceil(users.length / batchSize)}`);
}
this.logger.log(`✅ Data imported: ${users.length} users`);
}
private async verifyMigration(from: string, to: string): Promise<void> {
this.logger.log(`🔍 Verifying migration from ${from} to ${to}...`);
// Get counts from both providers
process.env.DATABASE_PROVIDER = from;
const sourceUserRepo = this.adapter.getUserRepository();
const sourceCount = (await sourceUserRepo.findAll()).length;
process.env.DATABASE_PROVIDER = to;
const targetUserRepo = this.adapter.getUserRepository();
const targetCount = (await targetUserRepo.findAll()).length;
if (sourceCount !== targetCount) {
throw new Error(`Data count mismatch: source=${sourceCount}, target=${targetCount}`);
}
this.logger.log(`✅ Migration verified: ${targetCount} records migrated successfully`);
}
private async updateConfiguration(provider: string): Promise<void> {
this.logger.log(`⚙️ Updating configuration for ${provider}...`);
const envPath = path.join(process.cwd(), '.env');
if (!fs.existsSync(envPath)) {
throw new Error(`No .env file found at ${envPath}; create one before running the migration`);
}
let envContent = fs.readFileSync(envPath, 'utf8');
// Update DATABASE_PROVIDER (append the key if it is not present yet)
if (/^DATABASE_PROVIDER=/m.test(envContent)) {
envContent = envContent.replace(/^DATABASE_PROVIDER=.*$/m, `DATABASE_PROVIDER=${provider}`);
} else {
envContent += `\nDATABASE_PROVIDER=${provider}\n`;
}
// Add provider-specific configuration templates
switch (provider) {
case 'mongodb':
if (!envContent.includes('MONGODB_URL')) {
envContent += '\n# MongoDB Configuration\n';
envContent += 'MONGODB_URL="mongodb://localhost:27017/receipt-verification"\n';
}
break;
case 'dynamodb':
if (!envContent.includes('AWS_DYNAMODB_REGION')) {
envContent += '\n# DynamoDB Configuration\n';
envContent += 'AWS_DYNAMODB_REGION="us-east-1"\n';
envContent += 'AWS_DYNAMODB_TABLE_PREFIX="receipt-verification"\n';
}
break;
}
// Write updated configuration
const backupPath = `${envPath}.backup.${Date.now()}`;
fs.copyFileSync(envPath, backupPath);
fs.writeFileSync(envPath, envContent);
this.logger.log(`✅ Configuration updated (backup: ${backupPath})`);
}
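// Illustrative result: after switching to mongodb, the updated .env is expected
// to contain a fragment like the following (the URL is the template default
// written above, not a verified value for your environment):
//
//   DATABASE_PROVIDER=mongodb
//   # MongoDB Configuration
//   MONGODB_URL="mongodb://localhost:27017/receipt-verification"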
}
// CLI Interface
async function main() {
const args = process.argv.slice(2);
const options: MigrationOptions = {
from: '',
to: '',
dryRun: false,
backup: true,
batchSize: 1000,
};
// Parse command line arguments
for (let i = 0; i < args.length; i += 2) {
const key = args[i].replace('--', '');
const value = args[i + 1];
switch (key) {
case 'from':
options.from = value;
break;
case 'to':
options.to = value;
break;
case 'dry-run':
options.dryRun = value === 'true';
break;
case 'backup':
options.backup = value !== 'false';
break;
case 'batch-size':
options.batchSize = parseInt(value, 10);
break;
}
}
if (!options.from || !options.to) {
console.log('Usage: npm run migrate:provider -- --from <provider> --to <provider>');
console.log('Options:');
console.log(' --from <provider> Source database provider');
console.log(' --to <provider> Target database provider');
console.log(' --dry-run <boolean> Run validation only (default: false)');
console.log(' --backup <boolean> Create backup before migration (default: true)');
console.log(' --batch-size <number> Batch size for data migration (default: 1000)');
process.exit(1);
}
try {
const app = await NestFactory.createApplicationContext(AppModule);
const adapter = app.get(UniversalDatabaseAdapter);
const migrationService = new ProviderMigrationService(adapter);
await migrationService.migrateProvider(options);
await app.close();
} catch (error) {
console.error('Migration failed:', error.message);
process.exit(1);
}
}
if (require.main === module) {
main();
}
export { ProviderMigrationService };
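// Programmatic usage sketch (mirrors the wiring in main() above; shown for
// consumers importing ProviderMigrationService directly):
//
//   const app = await NestFactory.createApplicationContext(AppModule);
//   const service = new ProviderMigrationService(app.get(UniversalDatabaseAdapter));
//   await service.migrateProvider({ from: 'supabase', to: 'prisma', dryRun: true });
//   await app.close();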