Add comprehensive database setup and user management system
- Implement PostgreSQL database schema with users and bookmarks tables
- Add database connection pooling with retry logic and error handling
- Create migration system with automatic schema initialization
- Add database CLI tools for management (init, status, validate, etc.)
- Include comprehensive error handling and diagnostics
- Add development seed data and testing utilities
- Implement health monitoring and connection pool statistics
- Create detailed documentation and troubleshooting guide

Database features:
- Users table with authentication fields and email verification
- Bookmarks table with user association and metadata
- Proper indexes for performance optimization
- Automatic timestamp triggers
- Transaction support with rollback handling
- Connection pooling (20 max connections, 30s idle timeout)
- Graceful shutdown handling

CLI commands available:
- npm run db:init - Initialize database
- npm run db:status - Check database status
- npm run db:validate - Validate schema
- npm run db:test - Run database tests
- npm run db:diagnostics - Full diagnostics
This commit is contained in:
263
backend/src/database/init.js
Normal file
263
backend/src/database/init.js
Normal file
@@ -0,0 +1,263 @@
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const dbConnection = require('./connection');
|
||||
|
||||
/**
 * Orchestrates database setup: connection, schema migrations, and
 * development seed data. Exposed as a singleton (see module.exports).
 *
 * Relies on `dbConnection` (./connection) for pooling, queries, and
 * transactions, and on `fs`/`path` for reading migration files.
 */
class DatabaseInitializer {
  constructor() {
    // Directory containing ordered .sql migration files, next to this module.
    this.migrationsPath = path.join(__dirname, 'migrations');
  }

  /**
   * Initialize the database end-to-end: connect, ensure the migrations
   * bookkeeping table, apply pending migrations, then seed dev data.
   * @throws rethrows any connection or migration error after logging it.
   */
  async initialize() {
    try {
      console.log('🚀 Starting database initialization...');

      // Connect to database
      await dbConnection.connect();

      // Create migrations table if it doesn't exist
      await this.createMigrationsTable();

      // Run all migrations
      await this.runMigrations();

      // Seed initial data if needed
      await this.seedData();

      console.log('✅ Database initialization completed successfully');

    } catch (error) {
      console.error('❌ Database initialization failed:', error);
      throw error;
    }
  }

  /**
   * Create the migrations tracking table if it does not already exist.
   * Each applied migration gets one row keyed by its unique filename.
   */
  async createMigrationsTable() {
    const createMigrationsTableSQL = `
      CREATE TABLE IF NOT EXISTS migrations (
        id SERIAL PRIMARY KEY,
        filename VARCHAR(255) NOT NULL UNIQUE,
        executed_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `;

    await dbConnection.query(createMigrationsTableSQL);
    console.log('📋 Migrations table ready');
  }

  /**
   * List the .sql migration files on disk.
   * @returns {Promise<string[]>} sorted filenames, or [] when the
   *   migrations directory is missing or unreadable.
   */
  async getMigrationFiles() {
    try {
      const files = await fs.readdir(this.migrationsPath);
      return files
        .filter((file) => file.endsWith('.sql'))
        .sort(); // Lexical sort keeps timestamp-prefixed migrations in order
    } catch (error) {
      console.error('❌ Error reading migrations directory:', error);
      return [];
    }
  }

  /**
   * Fetch the filenames of migrations already recorded as executed.
   * @returns {Promise<string[]>} executed filenames, or [] on query failure
   *   (e.g. the migrations table does not exist yet).
   */
  async getExecutedMigrations() {
    try {
      const result = await dbConnection.query('SELECT filename FROM migrations ORDER BY executed_at');
      return result.rows.map((row) => row.filename);
    } catch (error) {
      console.error('❌ Error fetching executed migrations:', error);
      return [];
    }
  }

  /**
   * Run every migration file that is not yet recorded in the
   * migrations table, in sorted order.
   */
  async runMigrations() {
    const migrationFiles = await this.getMigrationFiles();
    const executedMigrations = await this.getExecutedMigrations();

    // Set lookup avoids the O(n^2) includes() scan of the original filter.
    const executed = new Set(executedMigrations);
    const pendingMigrations = migrationFiles.filter((file) => !executed.has(file));

    if (pendingMigrations.length === 0) {
      console.log('📝 No pending migrations to run');
      return;
    }

    console.log(`📝 Running ${pendingMigrations.length} pending migrations...`);

    // Sequential on purpose: later migrations may depend on earlier ones.
    for (const migrationFile of pendingMigrations) {
      await this.runMigration(migrationFile);
    }
  }

  /**
   * Execute a single migration file and record it, atomically.
   * BUG FIX: the log lines previously interpolated `$(unknown)` —
   * shell-style syntax that JS prints literally — instead of the
   * template placeholder `${filename}`.
   * @param {string} filename - migration file name within migrationsPath.
   * @throws rethrows on failure so initialize() aborts the run.
   */
  async runMigration(filename) {
    try {
      console.log(`🔄 Running migration: ${filename}`);

      const migrationPath = path.join(this.migrationsPath, filename);
      const migrationSQL = await fs.readFile(migrationPath, 'utf8');

      // Transaction ensures the schema change and its bookkeeping row
      // commit or roll back together.
      await dbConnection.transaction(async (client) => {
        // Execute the migration SQL
        await client.query(migrationSQL);

        // Record the migration as executed
        await client.query(
          'INSERT INTO migrations (filename) VALUES ($1)',
          [filename]
        );
      });

      console.log(`✅ Migration completed: ${filename}`);

    } catch (error) {
      console.error(`❌ Migration failed: ${filename}`, error);
      throw error;
    }
  }

  /**
   * Seed a test user and sample bookmarks — development only, and only
   * when the users table is empty. Outside development, seeding failures
   * are logged but never thrown, so a bad seed cannot block startup.
   */
  async seedData() {
    try {
      // Check if we need to seed data (only in development)
      if (process.env.NODE_ENV !== 'development') {
        console.log('🌱 Skipping seed data (not in development mode)');
        return;
      }

      // Check if users already exist
      const userCount = await dbConnection.query('SELECT COUNT(*) FROM users');
      if (Number.parseInt(userCount.rows[0].count, 10) > 0) {
        console.log('🌱 Skipping seed data (users already exist)');
        return;
      }

      console.log('🌱 Seeding initial data...');

      // bcrypt is required lazily so it only loads on the dev seed path.
      const bcrypt = require('bcrypt');
      const testPassword = await bcrypt.hash('TestPassword123!', 12);

      // RETURNING id gets the new user's id in one round trip, replacing
      // the original INSERT + follow-up SELECT pair.
      const inserted = await dbConnection.query(`
        INSERT INTO users (email, password_hash, is_verified)
        VALUES ($1, $2, $3)
        RETURNING id
      `, ['test@example.com', testPassword, true]);
      const testUserId = inserted.rows[0].id;

      // Create sample bookmarks
      const sampleBookmarks = [
        {
          title: 'Google',
          url: 'https://www.google.com',
          folder: 'Search Engines',
          add_date: new Date()
        },
        {
          title: 'GitHub',
          url: 'https://github.com',
          folder: 'Development',
          add_date: new Date()
        },
        {
          title: 'Stack Overflow',
          url: 'https://stackoverflow.com',
          folder: 'Development',
          add_date: new Date()
        }
      ];

      for (const bookmark of sampleBookmarks) {
        await dbConnection.query(`
        INSERT INTO bookmarks (user_id, title, url, folder, add_date)
        VALUES ($1, $2, $3, $4, $5)
      `, [testUserId, bookmark.title, bookmark.url, bookmark.folder, bookmark.add_date]);
      }

      console.log('✅ Seed data created successfully');
      console.log('👤 Test user: test@example.com / TestPassword123!');

    } catch (error) {
      console.error('❌ Seeding failed:', error);
      // Don't throw error for seeding failures in production
      if (process.env.NODE_ENV === 'development') {
        throw error;
      }
    }
  }

  /**
   * Reset database (drop all tables) - USE WITH CAUTION.
   * Drops all known tables plus the timestamp trigger function, then
   * re-runs initialize(). Hard-refuses to run in production.
   * @throws {Error} when NODE_ENV === 'production'.
   */
  async reset() {
    if (process.env.NODE_ENV === 'production') {
      throw new Error('Database reset is not allowed in production');
    }

    console.log('⚠️ Resetting database...');

    // CASCADE removes dependent objects (FKs, views) along with each table.
    await dbConnection.query('DROP TABLE IF EXISTS bookmarks CASCADE');
    await dbConnection.query('DROP TABLE IF EXISTS users CASCADE');
    await dbConnection.query('DROP TABLE IF EXISTS migrations CASCADE');
    await dbConnection.query('DROP FUNCTION IF EXISTS update_updated_at_column() CASCADE');

    console.log('🗑️ Database reset completed');

    // Re-initialize
    await this.initialize();
  }

  /**
   * Get database status: connection health plus migration bookkeeping.
   * @returns {Promise<object>} health fields spread at the top level with a
   *   nested `migrations` summary, or { healthy: false, error } on failure.
   */
  async getStatus() {
    try {
      const health = await dbConnection.healthCheck();

      // Disk listing and DB lookup are independent — run them in parallel.
      const [migrationFiles, executedMigrations] = await Promise.all([
        this.getMigrationFiles(),
        this.getExecutedMigrations(),
      ]);

      return {
        ...health,
        migrations: {
          total: migrationFiles.length,
          executed: executedMigrations.length,
          pending: migrationFiles.length - executedMigrations.length,
          files: migrationFiles,
          executed_files: executedMigrations
        }
      };
    } catch (error) {
      return {
        healthy: false,
        error: error.message
      };
    }
  }
}
|
||||
|
||||
// Export a single shared instance so every consumer of this module
// coordinates through the same initializer state.
module.exports = new DatabaseInitializer();
|
||||
Reference in New Issue
Block a user