Initial commit - Black Canyon Tickets whitelabel platform

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
2025-07-08 12:31:31 -06:00
commit 997c129383
139 changed files with 60476 additions and 0 deletions

209
scripts/backup.js Executable file
View File

@@ -0,0 +1,209 @@
#!/usr/bin/env node
/**
* Backup management script
* Usage: node scripts/backup.js <command> [options]
*/
// NOTE(review): this require()s a TypeScript source file directly — presumably
// the project runs scripts through ts-node or a loader that registers the .ts
// extension; plain `node scripts/backup.js` cannot load it. Confirm.
const { backupManager, backupScheduler, DisasterRecovery } = require('../src/lib/backup.ts');
// Parse command line arguments: args[0] is the subcommand
// (create|restore|list|...), args[1..] are command-specific options.
const args = process.argv.slice(2);
const command = args[0];
/**
 * CLI entry point: routes the parsed subcommand to its handler.
 * Unknown (or missing) commands print usage; any handler error is
 * reported and the process exits with code 1.
 */
async function main() {
  // Map avoids accidental prototype hits a bare-object lookup would allow
  // (e.g. command === 'toString') and keeps the routing table declarative.
  const handlers = new Map([
    ['create', createBackup],
    ['restore', restoreBackup],
    ['list', listBackups],
    ['cleanup', cleanupBackups],
    ['schedule', startScheduler],
    ['verify', verifyIntegrity],
    ['disaster-recovery', createDisasterRecoveryBackup],
  ]);
  try {
    const handler = handlers.get(command);
    if (handler) {
      await handler();
    } else {
      showUsage();
    }
  } catch (error) {
    console.error('Error:', error.message);
    process.exit(1);
  }
}
/**
 * Creates a new backup and prints its metadata.
 * Reads the backup type from the second CLI argument
 * ('daily' | 'weekly' | 'monthly'), defaulting to 'daily';
 * exits with code 1 on an unrecognized type.
 */
async function createBackup() {
  const VALID_TYPES = ['daily', 'weekly', 'monthly'];
  const type = args[1] || 'daily';
  if (!VALID_TYPES.includes(type)) {
    console.error('Invalid backup type. Use: daily, weekly, or monthly');
    process.exit(1);
  }
  console.log(`Creating ${type} backup...`);
  const backup = await backupManager.createBackup(type);
  const sizeMb = (backup.size / 1024 / 1024).toFixed(2);
  console.log('Backup created successfully!');
  console.log(`ID: ${backup.id}`);
  console.log(`Size: ${sizeMb} MB`);
  console.log(`Tables: ${backup.tables.join(', ')}`);
  console.log(`Checksum: ${backup.checksum}`);
}
/**
 * Restores data from a previously-created backup.
 *
 * CLI: restore <backup-id> [--confirm] [--dry-run] [--tables t1,t2]
 * Destructive restores are refused unless --confirm is passed;
 * --dry-run simulates the restore without writing anything.
 * Exits with code 1 on invalid arguments.
 */
async function restoreBackup() {
  const backupId = args[1];
  const confirmFlag = args.includes('--confirm');
  const dryRun = args.includes('--dry-run');
  // Parse the optional --tables flag. The original indexed one past the
  // flag unconditionally, crashing with a TypeError when --tables was the
  // last argument; validate the value before splitting it.
  let tables;
  const tablesIdx = args.indexOf('--tables');
  if (tablesIdx !== -1) {
    const tableList = args[tablesIdx + 1];
    if (!tableList || tableList.startsWith('--')) {
      console.error('--tables requires a comma-separated list of table names');
      process.exit(1);
    }
    tables = tableList.split(',');
  }
  if (!backupId) {
    console.error('Backup ID is required');
    console.log('Usage: node scripts/backup.js restore <backup-id> [--confirm] [--dry-run] [--tables table1,table2]');
    process.exit(1);
  }
  if (!confirmFlag && !dryRun) {
    console.error('WARNING: This will overwrite existing data!');
    console.error('Use --confirm to proceed or --dry-run to simulate');
    process.exit(1);
  }
  console.log(`${dryRun ? 'Simulating' : 'Starting'} restore from backup: ${backupId}`);
  await backupManager.restoreBackup(backupId, {
    tables,
    dryRun,
    confirmRestore: confirmFlag
  });
  console.log(`Restore ${dryRun ? 'simulation' : 'process'} completed successfully!`);
}
/**
 * Prints every available backup with a status icon, type, local date,
 * size in MB, and table count. Prints "No backups found" when empty.
 */
async function listBackups() {
  console.log('Listing available backups...');
  const backups = await backupManager.listBackups();
  if (backups.length === 0) {
    console.log('No backups found');
    return;
  }
  const STATUS_ICONS = { completed: '✓', failed: '✗' };
  console.log('\nAvailable backups:');
  console.log('─'.repeat(80));
  backups.forEach((backup) => {
    const icon = STATUS_ICONS[backup.status] || '⏳';
    const when = new Date(backup.timestamp).toLocaleString();
    const sizeMb = (backup.size / 1024 / 1024).toFixed(2);
    console.log(`${icon} ${backup.id}`);
    console.log(` Type: ${backup.type}`);
    console.log(` Date: ${when}`);
    console.log(` Size: ${sizeMb} MB`);
    console.log(` Tables: ${backup.tables.length}`);
    console.log('');
  });
}
/**
 * Removes old backups. The retention policy itself lives in
 * backupManager.cleanupBackups(); this is just the CLI wrapper.
 */
async function cleanupBackups() {
  console.log('Cleaning up old backups...');
  await backupManager.cleanupBackups();
  console.log('Cleanup completed!');
}
/**
 * Starts the automated backup scheduler and blocks until Ctrl+C.
 * A heartbeat line is logged every minute so operators can see the
 * process is still alive; SIGINT stops the scheduler and exits 0.
 */
async function startScheduler() {
  const HEARTBEAT_MS = 60000;
  console.log('Starting backup scheduler...');
  backupScheduler.startScheduledBackups();
  console.log('Backup scheduler is running. Press Ctrl+C to stop.');
  // Graceful stop on Ctrl+C
  process.on('SIGINT', () => {
    console.log('\nStopping backup scheduler...');
    backupScheduler.stopScheduledBackups();
    process.exit(0);
  });
  // The interval doubles as a keep-alive and a once-a-minute heartbeat log.
  setInterval(() => {
    console.log(`Scheduler running... ${new Date().toLocaleString()}`);
  }, HEARTBEAT_MS);
}
/**
 * Runs the disaster-recovery integrity checks and prints one line per
 * check. Exits with code 1 when the overall status is not 'healthy'.
 */
async function verifyIntegrity() {
  console.log('Verifying system integrity...');
  const result = await DisasterRecovery.verifySystemIntegrity();
  console.log(`\nSystem Status: ${result.status.toUpperCase()}`);
  console.log('─'.repeat(50));
  result.checks.forEach((check) => {
    const icon = check.status === 'pass' ? '✓' : '✗';
    console.log(`${icon} ${check.name}: ${check.message}`);
  });
  if (result.status === 'healthy') {
    console.log('\n✅ System is healthy');
  } else {
    console.log('\n⚠ System requires attention!');
    process.exit(1);
  }
}
/**
 * Creates an emergency point-in-time backup. The label comes from the
 * second CLI argument; when omitted, a unique 'emergency-<timestamp>'
 * label is generated.
 */
async function createDisasterRecoveryBackup() {
  const label = args[1] || `emergency-${Date.now()}`;
  console.log(`Creating disaster recovery backup: ${label}`);
  const backup = await DisasterRecovery.createPointInTimeBackup(label);
  const sizeMb = (backup.size / 1024 / 1024).toFixed(2);
  console.log('Disaster recovery backup created successfully!');
  console.log(`ID: ${backup.id}`);
  console.log(`Size: ${sizeMb} MB`);
  console.log(`Location: disaster-recovery/${label}`);
}
/**
 * Prints the CLI help text covering every command, restore option,
 * and usage example. Called for unknown or missing commands.
 */
function showUsage() {
console.log(`
Backup Management Script
Usage: node scripts/backup.js <command> [options]
Commands:
create [type] Create a new backup (daily|weekly|monthly)
restore <id> [options] Restore from backup
list List available backups
cleanup Remove old backups according to retention policy
schedule Start automated backup scheduler
verify Verify system integrity
disaster-recovery [label] Create emergency backup
Restore options:
--confirm Confirm destructive restore operation
--dry-run Simulate restore without making changes
--tables table1,table2 Only restore specified tables
Examples:
node scripts/backup.js create daily
node scripts/backup.js restore daily-1234567890 --confirm
node scripts/backup.js restore daily-1234567890 --dry-run --tables users,events
node scripts/backup.js disaster-recovery pre-migration
node scripts/backup.js verify
`);
}
// Run the main function. main() already catches and reports errors itself,
// so this trailing .catch is only a last-resort guard against rejections
// escaping the try block (e.g. a throw inside the catch handler).
main().catch(console.error);

85
scripts/run-scraper.js Executable file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env node
/**
* Standalone event scraper script
* Can be run manually or via cron job
*
* Usage:
* node scripts/run-scraper.js
* node scripts/run-scraper.js --init (to initialize scraper organization)
*/
import { runEventScraper, initializeScraperOrganization } from '../src/lib/eventScraper.js';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { config } from 'dotenv';
// Get the directory of this script (ESM has no __dirname built in)
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Load environment variables from the project root.
// NOTE(review): dotenv's config() does not overwrite variables that are
// already set, so keys present in .env take precedence over .env.local
// here — confirm this ordering is intended (many setups expect the reverse).
config({ path: join(__dirname, '..', '.env') });
config({ path: join(__dirname, '..', '.env.local') });
/**
 * Runs the scraper once. With --init, the scraper organization is
 * initialized first; a failed init aborts with exit code 1. Any
 * scrape failure or thrown error also exits non-zero so cron/CI
 * can detect it.
 */
async function main() {
  const cliArgs = process.argv.slice(2);
  console.log('🚀 Black Canyon Tickets Event Scraper');
  console.log('======================================');
  try {
    if (cliArgs.includes('--init')) {
      console.log('🔧 Initializing scraper organization...');
      const ok = await initializeScraperOrganization();
      if (!ok) {
        console.error('❌ Failed to initialize scraper organization');
        process.exit(1);
      }
      console.log('✅ Scraper organization initialized successfully');
    }
    console.log('🔍 Running event scraper...');
    const result = await runEventScraper();
    if (!result.success) {
      console.error('❌', result.message);
      process.exit(1);
    }
    console.log('✅', result.message);
    const event = result.newEvent;
    if (event) {
      console.log('📅 New Featured Event Added:');
      console.log(` Title: ${event.title}`);
      console.log(` Venue: ${event.venue}`);
      console.log(` Category: ${event.category}`);
      console.log(` Start Time: ${event.startTime}`);
      if (event.imageUrl) {
        console.log(` Image: ${event.imageUrl}`);
      }
    }
  } catch (error) {
    console.error('💥 Scraper script failed:', error);
    process.exit(1);
  }
}
// Handle uncaught errors. For this one-shot script, any escaped error
// should terminate the process with a non-zero exit code so cron/CI
// can detect the failure.
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
  process.exit(1);
});
process.on('uncaughtException', (error) => {
  console.error('Uncaught Exception:', error);
  process.exit(1);
});
// Run the script (main sets its own exit codes on failure)
main();

197
scripts/scheduled-scraper.js Executable file
View File

@@ -0,0 +1,197 @@
#!/usr/bin/env node
/**
* Scheduled event scraper with node-cron
* Runs continuously and checks for new events on a schedule
*
* Usage:
* node scripts/scheduled-scraper.js
*
* Default schedule: Every 15 minutes
* Set SCRAPER_CRON_SCHEDULE environment variable to override
*/
import cron from 'node-cron';
import { runEventScraper, initializeScraperOrganization } from '../src/lib/eventScraper.js';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { config } from 'dotenv';
// Get the directory of this script (ESM has no __dirname built in)
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Load environment variables.
// NOTE(review): dotenv's config() does not overwrite variables that are
// already set, so keys in .env take precedence over .env.local here —
// confirm this ordering is intended.
config({ path: join(__dirname, '..', '.env') });
config({ path: join(__dirname, '..', '.env.local') });
// Configuration (overridable via environment variables)
const CRON_SCHEDULE = process.env.SCRAPER_CRON_SCHEDULE || '*/15 * * * *'; // Every 15 minutes
const TIMEZONE = process.env.SCRAPER_TIMEZONE || 'America/Denver'; // Mountain Time
// Mutable module-level state shared by the scrape cycle and status report.
let isRunning = false;   // re-entrancy guard: true while a scrape is in flight
let successCount = 0;    // successful scrape cycles since startup
let errorCount = 0;      // failed cycles plus uncaught/unhandled errors
let lastRunTime = null;  // Date of the most recent cycle start, or null if never run
let lastNewEvent = null; // most recent event the scraper added, if any
/**
 * Executes one scrape cycle and records the outcome in the module-level
 * counters. The isRunning flag prevents overlapping cycles when a scrape
 * takes longer than the cron interval; it is always cleared in finally.
 */
async function scheduledScraper() {
  if (isRunning) {
    console.log('⏳ Scraper already running, skipping this cycle');
    return;
  }
  isRunning = true;
  lastRunTime = new Date();
  try {
    console.log(`🔍 [${lastRunTime.toISOString()}] Running scheduled event scraper...`);
    const result = await runEventScraper();
    if (!result.success) {
      errorCount++;
      console.error(`❌ [${new Date().toISOString()}] ${result.message}`);
      return;
    }
    successCount++;
    if (!result.newEvent) {
      console.log(`✅ [${new Date().toISOString()}] ${result.message}`);
      return;
    }
    lastNewEvent = result.newEvent;
    console.log(`🎉 [${new Date().toISOString()}] NEW FEATURED EVENT ADDED!`);
    console.log(` ${result.newEvent.title} at ${result.newEvent.venue}`);
    console.log(` Category: ${result.newEvent.category}`);
    // You could add webhook notifications here
    // await notifyWebhook(result.newEvent);
  } catch (error) {
    errorCount++;
    console.error(`💥 [${new Date().toISOString()}] Scheduled scraper error:`, error);
  } finally {
    isRunning = false;
  }
}
/**
 * Dumps the scheduler's configuration, counters, last-run info, and
 * process uptime to the console as a framed status report.
 */
function printStatus() {
  const report = [
    '\n📊 SCRAPER STATUS',
    '==================',
    `Schedule: ${CRON_SCHEDULE} (${TIMEZONE})`,
    `Running: ${isRunning ? 'Yes' : 'No'}`,
    `Success Count: ${successCount}`,
    `Error Count: ${errorCount}`,
    `Last Run: ${lastRunTime ? lastRunTime.toISOString() : 'Never'}`,
  ];
  if (lastNewEvent) {
    report.push(`Last New Event: ${lastNewEvent.title} (${lastNewEvent.category})`);
  }
  report.push(`Uptime: ${Math.floor(process.uptime())} seconds`);
  report.push('==================\n');
  for (const line of report) {
    console.log(line);
  }
}
/**
 * Initializes the scraper organization, runs one immediate scrape so we
 * don't wait a full cron interval, then starts the cron-driven schedule
 * and a 5-minute periodic status dump. Exits with code 1 if the
 * organization cannot be initialized.
 *
 * @returns {Promise<object>} the started node-cron ScheduledTask
 */
async function start() {
  console.log('🚀 Black Canyon Tickets Scheduled Event Scraper');
  console.log('================================================');
  console.log(`Schedule: ${CRON_SCHEDULE}`);
  console.log(`Timezone: ${TIMEZONE}`);
  console.log('Press Ctrl+C to stop\n');
  // Initialize scraper organization if needed
  try {
    console.log('🔧 Checking scraper organization...');
    await initializeScraperOrganization();
    console.log('✅ Scraper organization ready\n');
  } catch (error) {
    console.error('❌ Failed to initialize scraper organization:', error);
    process.exit(1);
  }
  // Run once immediately
  console.log('🏃 Running initial scrape...');
  await scheduledScraper();
  // Schedule the cron job (created stopped, then started explicitly)
  const task = cron.schedule(CRON_SCHEDULE, scheduledScraper, {
    scheduled: false,
    timezone: TIMEZONE
  });
  task.start();
  // BUG FIX: node-cron's ScheduledTask does not expose a nextDates()
  // method (that API belongs to the separate 'cron' package), so the
  // original unconditional call threw a TypeError and crashed the process
  // right after the initial scrape. Guard the call so startup succeeds
  // regardless of which scheduler implementation is installed.
  if (typeof task.nextDates === 'function') {
    console.log(`⏰ Scheduler started. Next run: ${task.nextDates()}`);
  } else {
    console.log(`⏰ Scheduler started (schedule: ${CRON_SCHEDULE}).`);
  }
  // Print status every 5 minutes
  setInterval(printStatus, 5 * 60 * 1000);
  return task;
}
/**
 * Registers SIGINT/SIGTERM handlers that stop the cron task, print a
 * final status report, and exit with code 0.
 *
 * @param {object} task - the node-cron scheduled task (may be undefined
 *   if startup did not complete)
 */
function setupGracefulShutdown(task) {
  const shutdown = (signal) => {
    console.log(`\n🛑 Received ${signal}, shutting down gracefully...`);
    if (task) {
      task.stop();
      console.log('✅ Scheduler stopped');
    }
    printStatus();
    console.log('👋 Goodbye!');
    process.exit(0);
  };
  for (const signal of ['SIGINT', 'SIGTERM']) {
    process.on(signal, () => shutdown(signal));
  }
}
/**
 * Installs last-resort error handlers. Unlike the one-shot scraper
 * script, these deliberately do NOT exit — the scheduler should survive
 * transient failures — they just log and bump the error counter.
 */
function setupErrorHandlers() {
  const noteFailure = () => {
    errorCount++;
  };
  process.on('unhandledRejection', (reason, promise) => {
    console.error('Unhandled Rejection at:', promise, 'reason:', reason);
    noteFailure();
  });
  process.on('uncaughtException', (error) => {
    console.error('Uncaught Exception:', error);
    noteFailure();
  });
}
/**
 * Entry point: wires up the error handlers, starts the scheduler, and
 * registers graceful shutdown before settling into a keep-alive loop.
 */
async function main() {
  setupErrorHandlers();
  const task = await start();
  setupGracefulShutdown(task);
  // Empty 1-second timer keeps the event loop from ever draining
  setInterval(() => {}, 1000);
}
// Start the scheduled scraper. This catch only fires for startup failures —
// once running, errors are absorbed (and counted) by setupErrorHandlers()
// so the scheduler keeps going.
main().catch(error => {
  console.error('Failed to start scheduled scraper:', error);
  process.exit(1);
});