Add comprehensive database setup and user management system

- Implement PostgreSQL database schema with users and bookmarks tables
- Add database connection pooling with retry logic and error handling
- Create migration system with automatic schema initialization
- Add database CLI tools for management (init, status, validate, etc.)
- Include comprehensive error handling and diagnostics
- Add development seed data and testing utilities
- Implement health monitoring and connection pool statistics
- Create detailed documentation and troubleshooting guide

Database features:
- Users table with authentication fields and email verification
- Bookmarks table with user association and metadata
- Proper indexes for performance optimization
- Automatic timestamp triggers
- Transaction support with rollback handling
- Connection pooling (20 max connections, 30s idle timeout)
- Graceful shutdown handling

CLI commands available:
- npm run db:init - Initialize database
- npm run db:status - Check database status
- npm run db:validate - Validate schema
- npm run db:test - Run database tests
- npm run db:diagnostics - Full diagnostics
This commit is contained in:
2025-07-19 23:21:50 +02:00
commit 0abee5b794
66 changed files with 45023 additions and 0 deletions

33
backend/.env.example Normal file
View File

@ -0,0 +1,33 @@
# Server Configuration
NODE_ENV=development
PORT=3001
# Database Configuration
DB_HOST=localhost
DB_PORT=5432
DB_NAME=bookmark_manager
DB_USER=your_db_user
DB_PASSWORD=your_db_password
DB_SSL=false
# JWT Configuration
JWT_SECRET=your_super_secret_jwt_key_here_make_it_long_and_random
JWT_EXPIRES_IN=24h
# Email Configuration
EMAIL_HOST=smtp.gmail.com
EMAIL_PORT=587
EMAIL_SECURE=false
EMAIL_USER=your_email@gmail.com
EMAIL_PASSWORD=your_app_password
EMAIL_FROM=your_email@gmail.com
# Application Configuration
ALLOWED_ORIGINS=http://localhost:3000,http://127.0.0.1:3000
BASE_URL=http://localhost:3001
# Security Configuration
BCRYPT_SALT_ROUNDS=12
RATE_LIMIT_WINDOW_MS=900000
RATE_LIMIT_MAX_REQUESTS=100
AUTH_RATE_LIMIT_MAX=5

62
backend/.gitignore vendored Normal file
View File

@ -0,0 +1,62 @@
# Dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Logs
logs
*.log
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Coverage directory used by tools like istanbul
coverage/
# nyc test coverage
.nyc_output
# Dependency directories
node_modules/
jspm_packages/
# Optional npm cache directory
.npm
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
# IDE files
.vscode/
.idea/
*.swp
*.swo
# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

1539
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

36
backend/package.json Normal file
View File

@ -0,0 +1,36 @@
{
"name": "backend",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"start": "node server.js",
"dev": "nodemon server.js",
"test": "node test-db-setup.js",
"db:init": "node scripts/db-cli.js init",
"db:status": "node scripts/db-cli.js status",
"db:reset": "node scripts/db-cli.js reset",
"db:validate": "node scripts/db-cli.js validate",
"db:cleanup": "node scripts/db-cli.js cleanup",
"db:diagnostics": "node scripts/db-cli.js diagnostics",
"db:test": "node test-db-setup.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "commonjs",
"dependencies": {
"bcrypt": "^6.0.0",
"cookie-parser": "^1.4.7",
"dotenv": "^17.2.0",
"express": "^5.1.0",
"express-rate-limit": "^8.0.1",
"helmet": "^8.1.0",
"jsonwebtoken": "^9.0.2",
"nodemailer": "^7.0.5",
"pg": "^8.16.3"
},
"devDependencies": {
"nodemon": "^3.1.10"
}
}

96
backend/scripts/db-cli.js Executable file
View File

@ -0,0 +1,96 @@
#!/usr/bin/env node
/**
 * Database CLI utility for managing the bookmark manager database.
 *
 * Usage: node scripts/db-cli.js <command>
 * Run with no argument (or `help`) to list the available commands.
 */
require('dotenv').config();
const dbInitializer = require('../src/database/init');
const dbConnection = require('../src/database/connection');
const dbUtils = require('../src/database/utils');

// Command name -> human-readable description; also drives the help output.
const commands = {
  init: 'Initialize database with migrations',
  status: 'Show database status and diagnostics',
  reset: 'Reset database (development only)',
  validate: 'Validate database schema',
  cleanup: 'Clean up expired tokens and old data',
  diagnostics: 'Run comprehensive database diagnostics',
  help: 'Show this help message'
};

/**
 * Execute a single CLI command.
 *
 * Exits the process with a non-zero code on failure. Always closes the
 * database connection pool in `finally` so the event loop can drain and the
 * process can exit cleanly.
 *
 * @param {string} command - One of the keys in `commands`; unknown values fall through to help.
 */
async function runCommand(command) {
  try {
    // Each case body is wrapped in braces so its `const` declarations are
    // scoped to that case (avoids the no-case-declarations pitfall).
    switch (command) {
      case 'init': {
        console.log('🚀 Initializing database...');
        await dbInitializer.initialize();
        break;
      }
      case 'status': {
        console.log('📊 Getting database status...');
        const status = await dbInitializer.getStatus();
        console.log(JSON.stringify(status, null, 2));
        break;
      }
      case 'reset': {
        // Guard against catastrophic data loss outside development.
        if (process.env.NODE_ENV === 'production') {
          console.error('❌ Reset is not allowed in production');
          process.exit(1);
        }
        console.log('⚠️ Resetting database...');
        await dbInitializer.reset();
        break;
      }
      case 'validate': {
        console.log('🔍 Validating database schema...');
        const validation = await dbUtils.validateSchema();
        console.log(JSON.stringify(validation, null, 2));
        if (validation.valid) {
          console.log('✅ Schema validation passed');
        } else {
          console.log('❌ Schema validation failed');
          process.exit(1);
        }
        break;
      }
      case 'cleanup': {
        console.log('🧹 Running database cleanup...');
        await dbUtils.cleanup();
        break;
      }
      case 'diagnostics': {
        console.log('🔍 Running database diagnostics...');
        const diagnostics = await dbUtils.diagnostics();
        console.log(JSON.stringify(diagnostics, null, 2));
        break;
      }
      case 'help':
      default: {
        console.log('📖 Database CLI Commands:');
        console.log('');
        Object.entries(commands).forEach(([cmd, desc]) => {
          console.log(`  ${cmd.padEnd(12)} - ${desc}`);
        });
        console.log('');
        console.log('Usage: node scripts/db-cli.js <command>');
        break;
      }
    }
  } catch (error) {
    console.error('❌ Command failed:', error.message);
    process.exit(1);
  } finally {
    await dbConnection.close();
  }
}

// Default to help when no command is supplied. runCommand handles its own
// errors, so this floating promise cannot produce an unhandled rejection.
runCommand(process.argv[2] || 'help');

45
backend/server.js Normal file
View File

@ -0,0 +1,45 @@
const app = require('./src/app');
const dbInitializer = require('./src/database/init');
const dbConnection = require('./src/database/connection');

const PORT = process.env.PORT || 3001;

/**
 * Initialize the database, start the HTTP server, and register graceful
 * shutdown handlers. Exits the process with code 1 if startup fails.
 */
async function startServer() {
  try {
    console.log('🚀 Starting Bookmark Manager Backend...');

    // Run migrations / connect the pool before accepting any traffic.
    await dbInitializer.initialize();

    const server = app.listen(PORT, () => {
      console.log(`✅ Server is running on port ${PORT}`);
      console.log(`🌍 Environment: ${process.env.NODE_ENV || 'development'}`);
      console.log(`🔗 Health check: http://localhost:${PORT}/health`);
    });

    // Shared graceful-shutdown path for both termination signals: stop
    // accepting connections, close the DB pool, then exit.
    const shutdown = (signal) => {
      console.log(`🛑 ${signal} received, shutting down gracefully...`);
      server.close(async () => {
        await dbConnection.close();
        process.exit(0);
      });
    };
    process.on('SIGTERM', () => shutdown('SIGTERM'));
    process.on('SIGINT', () => shutdown('SIGINT'));
  } catch (error) {
    console.error('❌ Failed to start server:', error);
    process.exit(1);
  }
}

startServer();

113
backend/src/app.js Normal file
View File

@ -0,0 +1,113 @@
const express = require('express');
const helmet = require('helmet');
const rateLimit = require('express-rate-limit');
const cookieParser = require('cookie-parser');
require('dotenv').config();

const app = express();

// Security headers (CSP, HSTS, X-Content-Type-Options, etc.).
app.use(helmet());

// Global rate limiting. Window and limit are configurable via the same env
// vars documented in .env.example, keeping this consistent with config/index.js.
const limiter = rateLimit({
  windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS, 10) || 15 * 60 * 1000, // default 15 minutes
  max: parseInt(process.env.RATE_LIMIT_MAX_REQUESTS, 10) || 100, // limit each IP per window
  message: 'Too many requests from this IP, please try again later.'
});
app.use(limiter);

// Body parsing middleware
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true }));
app.use(cookieParser());

// CORS middleware: echo the request origin back only when it is allow-listed.
app.use((req, res, next) => {
  const allowedOrigins = process.env.ALLOWED_ORIGINS?.split(',') || ['http://localhost:3000'];
  const origin = req.headers.origin;
  // Vary on Origin so shared caches never serve one origin's CORS headers
  // to a different origin.
  res.setHeader('Vary', 'Origin');
  if (allowedOrigins.includes(origin)) {
    res.setHeader('Access-Control-Allow-Origin', origin);
  }
  res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS');
  res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization');
  res.setHeader('Access-Control-Allow-Credentials', 'true');
  if (req.method === 'OPTIONS') {
    // Short-circuit preflight requests; no body is needed.
    res.sendStatus(200);
  } else {
    next();
  }
});

// Health check endpoint: basic DB connectivity plus pool diagnostics.
app.get('/health', async (req, res) => {
  try {
    const dbConnection = require('./database/connection');
    const dbUtils = require('./database/utils');
    const health = await dbConnection.healthCheck();
    const diagnostics = await dbUtils.diagnostics();
    res.json({
      status: health.healthy ? 'OK' : 'ERROR',
      timestamp: new Date().toISOString(),
      database: health,
      diagnostics: diagnostics
    });
  } catch (error) {
    res.status(500).json({
      status: 'ERROR',
      timestamp: new Date().toISOString(),
      error: error.message
    });
  }
});

// Database status endpoint: migration state and schema validation result.
app.get('/db-status', async (req, res) => {
  try {
    const dbInitializer = require('./database/init');
    const dbUtils = require('./database/utils');
    const status = await dbInitializer.getStatus();
    const validation = await dbUtils.validateSchema();
    res.json({
      timestamp: new Date().toISOString(),
      ...status,
      schema: validation
    });
  } catch (error) {
    res.status(500).json({
      error: error.message,
      timestamp: new Date().toISOString()
    });
  }
});

// API routes will be added here
// app.use('/api/auth', require('./routes/auth'));
// app.use('/api/user', require('./routes/user'));
// app.use('/api/bookmarks', require('./routes/bookmarks'));

// Error handling middleware (must be registered after all routes).
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({
    error: 'Something went wrong!',
    timestamp: new Date().toISOString()
  });
});

// 404 handler — catches anything no earlier route matched.
app.use((req, res) => {
  res.status(404).json({
    error: 'Route not found',
    timestamp: new Date().toISOString()
  });
});

module.exports = app;

View File

@ -0,0 +1,15 @@
require('dotenv').config();

/**
 * PostgreSQL connection-pool configuration, read from environment variables
 * with development-friendly defaults (see .env.example).
 */
const config = {
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT, 10) || 5432, // radix 10: env values are decimal strings
  database: process.env.DB_NAME || 'bookmark_manager',
  user: process.env.DB_USER || 'postgres',
  password: process.env.DB_PASSWORD || 'password',
  // DB_SSL=true enables TLS but skips certificate verification — acceptable
  // for hosts with self-signed chains; tighten for strict deployments.
  ssl: process.env.DB_SSL === 'true' ? { rejectUnauthorized: false } : false,
  max: 20, // maximum number of clients in the pool
  idleTimeoutMillis: 30000, // how long a client is allowed to remain idle
  connectionTimeoutMillis: 2000, // how long to wait when connecting a new client
};

module.exports = config;

View File

@ -0,0 +1,44 @@
require('dotenv').config();

/**
 * Centralized application configuration assembled from environment
 * variables, with development-friendly defaults (see .env.example).
 * All numeric env values are parsed with an explicit radix of 10.
 */
const config = {
  // Server
  port: parseInt(process.env.PORT, 10) || 3001,
  nodeEnv: process.env.NODE_ENV || 'development',
  // Database (pool settings live in ./database)
  database: require('./database'),
  // JWT
  jwt: {
    // NOTE(review): the fallback secret is for development only — production
    // deployments must set JWT_SECRET.
    secret: process.env.JWT_SECRET || 'fallback_secret_for_development_only',
    expiresIn: process.env.JWT_EXPIRES_IN || '24h'
  },
  // Email
  email: {
    host: process.env.EMAIL_HOST || 'smtp.gmail.com',
    port: parseInt(process.env.EMAIL_PORT, 10) || 587,
    secure: process.env.EMAIL_SECURE === 'true',
    user: process.env.EMAIL_USER,
    password: process.env.EMAIL_PASSWORD,
    from: process.env.EMAIL_FROM
  },
  // Security
  bcrypt: {
    saltRounds: parseInt(process.env.BCRYPT_SALT_ROUNDS, 10) || 12
  },
  // Rate Limiting
  rateLimit: {
    windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS, 10) || 15 * 60 * 1000,
    maxRequests: parseInt(process.env.RATE_LIMIT_MAX_REQUESTS, 10) || 100,
    authMaxRequests: parseInt(process.env.AUTH_RATE_LIMIT_MAX, 10) || 5
  },
  // Application
  baseUrl: process.env.BASE_URL || 'http://localhost:3001',
  allowedOrigins: process.env.ALLOWED_ORIGINS?.split(',') || ['http://localhost:3000']
};

module.exports = config;

View File

@ -0,0 +1 @@
# This file ensures the controllers directory is tracked by git

View File

@ -0,0 +1,204 @@
# Database Setup and Management
This directory contains all database-related code for the Bookmark Manager backend.
## Prerequisites
1. **PostgreSQL** must be installed and running
2. **Database** must be created (default: `bookmark_manager`)
3. **Environment variables** must be configured in `.env`
## Quick Setup
1. Install PostgreSQL (if not already installed):
```bash
# Ubuntu/Debian
sudo apt-get install postgresql postgresql-contrib
# macOS with Homebrew
brew install postgresql
# Windows - Download from postgresql.org
```
2. Start PostgreSQL service:
```bash
# Ubuntu/Debian
sudo systemctl start postgresql
# macOS with Homebrew
brew services start postgresql
```
3. Create database and user:
```bash
# Connect to PostgreSQL as superuser
sudo -u postgres psql
# Create database
CREATE DATABASE bookmark_manager;
# Create user (optional, can use postgres user)
CREATE USER bookmark_user WITH PASSWORD 'your_password';
GRANT ALL PRIVILEGES ON DATABASE bookmark_manager TO bookmark_user;
# Exit
\q
```
4. Update `.env` file with your database credentials
5. Initialize database:
```bash
npm run db:init
```
## File Structure
```
database/
├── README.md # This file
├── connection.js # Database connection pool management
├── init.js # Database initialization and migrations
├── utils.js # Database utility functions
└── migrations/ # SQL migration files
├── 001_create_users_table.sql
└── 002_create_bookmarks_table.sql
```
## Available Commands
```bash
# Initialize database with migrations
npm run db:init
# Check database status
npm run db:status
# Validate database schema
npm run db:validate
# Run database diagnostics
npm run db:diagnostics
# Clean up expired data
npm run db:cleanup
# Reset database (development only)
npm run db:reset
```
## Database Schema
### Users Table
- `id` (UUID) - Primary key
- `email` (VARCHAR) - Unique email address
- `password_hash` (VARCHAR) - bcrypt hashed password
- `is_verified` (BOOLEAN) - Email verification status
- `created_at` (TIMESTAMP) - Account creation time
- `updated_at` (TIMESTAMP) - Last update time
- `last_login` (TIMESTAMP) - Last successful login
- `verification_token` (VARCHAR) - Email verification token
- `reset_token` (VARCHAR) - Password reset token
- `reset_expires` (TIMESTAMP) - Reset token expiration
### Bookmarks Table
- `id` (UUID) - Primary key
- `user_id` (UUID) - Foreign key to users table
- `title` (VARCHAR) - Bookmark title
- `url` (TEXT) - Target URL
- `folder` (VARCHAR) - Folder path
- `add_date` (TIMESTAMP) - Original bookmark date
- `last_modified` (TIMESTAMP) - Last modification date
- `icon` (TEXT) - Favicon URL or data URI
- `status` (VARCHAR) - Link status (unknown, valid, invalid, testing, duplicate)
- `created_at` (TIMESTAMP) - Database creation time
- `updated_at` (TIMESTAMP) - Database update time
## Connection Configuration
The database connection uses PostgreSQL connection pooling with the following default settings:
- **Max connections**: 20
- **Idle timeout**: 30 seconds
- **Connection timeout**: 2 seconds
- **Retry attempts**: 5 with exponential backoff
## Error Handling
The database layer includes comprehensive error handling:
- **Connection failures**: Automatic retry with exponential backoff
- **Query errors**: Detailed logging with performance metrics
- **Transaction support**: Automatic rollback on errors
- **Health monitoring**: Connection pool statistics and health checks
## Migration System
Migrations are automatically tracked in the `migrations` table:
- Migrations run in alphabetical order
- Each migration is recorded when successfully executed
- Failed migrations prevent application startup
- Migrations are idempotent (safe to run multiple times)
## Development vs Production
### Development
- Detailed query logging enabled
- Seed data automatically created
- Database reset command available
- Test user created: `test@example.com` / `TestPassword123!`
### Production
- Query logging disabled for performance
- No seed data creation
- Database reset disabled
- Enhanced security settings
## Troubleshooting
### Connection Issues
1. Verify PostgreSQL is running: `sudo systemctl status postgresql`
2. Check database exists: `psql -l`
3. Test connection: `psql -h localhost -U postgres -d bookmark_manager`
4. Verify credentials in `.env` file
### Migration Issues
1. Check migration files exist in `migrations/` directory
2. Verify database user has CREATE privileges
3. Check migration logs for specific errors
4. Use `npm run db:validate` to check schema
### Performance Issues
1. Monitor connection pool: `npm run db:diagnostics`
2. Check for long-running queries
3. Verify indexes are created properly
4. Monitor database logs
## Security Considerations
- All queries use parameterized statements to prevent SQL injection
- Connection strings should use environment variables
- Database user should have minimal required privileges
- Regular cleanup of expired tokens and old data
- Connection pooling prevents connection exhaustion attacks
## Backup and Recovery
For production deployments:
1. Set up regular database backups using `pg_dump`
2. Test backup restoration procedures
3. Monitor database size and performance
4. Implement log rotation for database logs
5. Consider read replicas for high availability
## Monitoring
The database layer provides several monitoring endpoints:
- `/health` - Basic health check with database status
- `/db-status` - Detailed database status and migration info
- Connection pool statistics via `getStats()` method
- Query performance logging in development mode

View File

@ -0,0 +1,238 @@
const { Pool } = require('pg');
const dbConfig = require('../config/database');

/**
 * Singleton wrapper around a pg connection pool with retry logic,
 * query/transaction helpers, and health/statistics reporting.
 */
class DatabaseConnection {
  constructor() {
    this.pool = null;
    this.isConnected = false;
    this.retryAttempts = 0;
    this.maxRetries = 5;
    this.baseRetryDelay = 2000; // 2 seconds; backoff doubles from here
    this.retryDelay = this.baseRetryDelay;
  }

  /**
   * Initialize the database connection pool and verify connectivity with a
   * round-trip query. On failure, delegates to handleConnectionError, which
   * retries with exponential backoff; if a retry eventually succeeds the
   * connected pool is returned instead of re-throwing the original error.
   *
   * @returns {Promise<Pool>} the connected pool
   * @throws when the connection cannot be established after all retries
   */
  async connect() {
    try {
      this.pool = new Pool(dbConfig);
      // Prove the connection actually works before declaring success.
      const client = await this.pool.connect();
      await client.query('SELECT NOW()');
      client.release();
      this.isConnected = true;
      this.retryAttempts = 0;
      this.retryDelay = this.baseRetryDelay; // reset backoff after success
      console.log('✅ Database connected successfully');
      console.log(`📊 Pool config: max=${dbConfig.max}, idle=${dbConfig.idleTimeoutMillis}ms`);
      this.setupEventHandlers();
      return this.pool;
    } catch (error) {
      console.error('❌ Database connection failed:', error.message);
      await this.handleConnectionError(error);
      // handleConnectionError may have reconnected via a successful retry;
      // only propagate the failure when we are still disconnected.
      if (this.isConnected && this.pool) {
        return this.pool;
      }
      throw error;
    }
  }

  /**
   * Register pool lifecycle handlers for connection monitoring.
   */
  setupEventHandlers() {
    this.pool.on('connect', (client) => {
      console.log('🔗 New database client connected');
    });
    this.pool.on('acquire', (client) => {
      console.log('📥 Database client acquired from pool');
    });
    this.pool.on('remove', (client) => {
      console.log('📤 Database client removed from pool');
    });
    // Idle-client errors (e.g. backend restart) mark us disconnected and
    // trigger the same retry path as an initial connection failure.
    this.pool.on('error', async (error, client) => {
      console.error('❌ Database pool error:', error.message);
      this.isConnected = false;
      await this.handleConnectionError(error);
    });
  }

  /**
   * Diagnose a connection error, print actionable hints, and retry the
   * connection with exponential backoff up to maxRetries. Exits the process
   * once the retry budget is exhausted.
   */
  async handleConnectionError(error) {
    // Provide helpful error messages based on error type.
    if (error.code === 'ECONNREFUSED') {
      console.error('❌ Connection refused - PostgreSQL server is not running or not accessible');
      console.error('💡 Make sure PostgreSQL is installed and running on the configured host and port');
      console.error(`💡 Current config: ${dbConfig.host}:${dbConfig.port}`);
    } else if (error.code === 'ENOTFOUND') {
      console.error('❌ Host not found - Check your database host configuration');
      console.error(`💡 Current host: ${dbConfig.host}`);
    } else if (error.message.includes('password authentication failed')) {
      console.error('❌ Authentication failed - Check your database credentials');
      console.error(`💡 Current user: ${dbConfig.user}`);
    } else if (error.message.includes('database') && error.message.includes('does not exist')) {
      console.error('❌ Database does not exist - Create the database first');
      console.error(`💡 Database name: ${dbConfig.database}`);
      console.error('💡 Run: createdb ' + dbConfig.database);
    }
    if (this.retryAttempts < this.maxRetries) {
      this.retryAttempts++;
      console.log(`🔄 Retrying database connection (${this.retryAttempts}/${this.maxRetries}) in ${this.retryDelay}ms...`);
      await new Promise(resolve => setTimeout(resolve, this.retryDelay));
      try {
        await this.connect();
      } catch (retryError) {
        console.error(`❌ Retry ${this.retryAttempts} failed:`, retryError.message);
        // Exponential backoff
        this.retryDelay *= 2;
        if (this.retryAttempts >= this.maxRetries) {
          console.error('💥 Max retry attempts reached. Database connection failed permanently.');
          console.error('');
          console.error('🔧 To fix this issue:');
          console.error('1. Install PostgreSQL if not already installed');
          console.error('2. Start PostgreSQL service');
          console.error('3. Create the database: createdb ' + dbConfig.database);
          console.error('4. Update .env file with correct database credentials');
          console.error('');
          process.exit(1);
        }
      }
    }
  }

  /**
   * Execute a parameterized query against the pool.
   *
   * @param {string} text - SQL text (use $1, $2, ... placeholders).
   * @param {Array} params - Bound parameter values.
   * @returns {Promise<import('pg').QueryResult>}
   * @throws when not connected or the query fails
   */
  async query(text, params = []) {
    if (!this.isConnected || !this.pool) {
      throw new Error('Database not connected');
    }
    const start = Date.now();
    try {
      const result = await this.pool.query(text, params);
      const duration = Date.now() - start;
      // Per-query logging only in development to avoid production overhead.
      if (process.env.NODE_ENV === 'development') {
        console.log('🔍 Query executed:', {
          query: text.substring(0, 100) + (text.length > 100 ? '...' : ''),
          duration: `${duration}ms`,
          rows: result.rowCount
        });
      }
      return result;
    } catch (error) {
      const duration = Date.now() - start;
      console.error('❌ Query error:', {
        query: text.substring(0, 100) + (text.length > 100 ? '...' : ''),
        duration: `${duration}ms`,
        error: error.message
      });
      throw error;
    }
  }

  /**
   * Check out a dedicated client from the pool (caller must release it).
   */
  async getClient() {
    if (!this.isConnected || !this.pool) {
      throw new Error('Database not connected');
    }
    return await this.pool.connect();
  }

  /**
   * Run `callback(client)` inside a transaction. Commits on success, rolls
   * back on any thrown error, and always releases the client.
   *
   * @param {(client: import('pg').PoolClient) => Promise<any>} callback
   * @returns {Promise<any>} whatever the callback resolves to
   */
  async transaction(callback) {
    const client = await this.getClient();
    try {
      await client.query('BEGIN');
      const result = await callback(client);
      await client.query('COMMIT');
      return result;
    } catch (error) {
      await client.query('ROLLBACK');
      throw error;
    } finally {
      client.release();
    }
  }

  /**
   * Probe database health with a trivial query and report pool counters.
   * Never throws; failures are reported in the returned object.
   */
  async healthCheck() {
    try {
      const result = await this.query('SELECT 1 as health_check');
      return {
        healthy: true,
        connected: this.isConnected,
        poolSize: this.pool ? this.pool.totalCount : 0,
        idleCount: this.pool ? this.pool.idleCount : 0,
        waitingCount: this.pool ? this.pool.waitingCount : 0
      };
    } catch (error) {
      return {
        healthy: false,
        connected: false,
        error: error.message
      };
    }
  }

  /**
   * Drain and close the pool; safe to call when already closed.
   */
  async close() {
    if (this.pool) {
      console.log('🔌 Closing database connection...');
      await this.pool.end();
      this.isConnected = false;
      this.pool = null;
      console.log('✅ Database connection closed');
    }
  }

  /**
   * Snapshot of connection and pool statistics for diagnostics endpoints.
   */
  getStats() {
    if (!this.pool) {
      return { connected: false };
    }
    return {
      connected: this.isConnected,
      totalCount: this.pool.totalCount,
      idleCount: this.pool.idleCount,
      waitingCount: this.pool.waitingCount,
      config: {
        max: dbConfig.max,
        idleTimeoutMillis: dbConfig.idleTimeoutMillis,
        connectionTimeoutMillis: dbConfig.connectionTimeoutMillis
      }
    };
  }
}

// Create singleton instance
const dbConnection = new DatabaseConnection();
module.exports = dbConnection;

View File

@ -0,0 +1,263 @@
const fs = require('fs').promises;
const path = require('path');
const dbConnection = require('./connection');

/**
 * Orchestrates database setup: connects the pool, tracks and applies SQL
 * migrations from ./migrations, and seeds development data.
 */
class DatabaseInitializer {
  constructor() {
    this.migrationsPath = path.join(__dirname, 'migrations');
  }

  /**
   * Initialize the database: connect, ensure the migrations table, run all
   * pending migrations, and (in development) seed sample data.
   *
   * @throws when any step other than development seeding fails
   */
  async initialize() {
    try {
      console.log('🚀 Starting database initialization...');
      await dbConnection.connect();
      await this.createMigrationsTable();
      await this.runMigrations();
      await this.seedData();
      console.log('✅ Database initialization completed successfully');
    } catch (error) {
      console.error('❌ Database initialization failed:', error);
      throw error;
    }
  }

  /**
   * Ensure the migrations tracking table exists.
   */
  async createMigrationsTable() {
    const createMigrationsTableSQL = `
      CREATE TABLE IF NOT EXISTS migrations (
        id SERIAL PRIMARY KEY,
        filename VARCHAR(255) NOT NULL UNIQUE,
        executed_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `;
    await dbConnection.query(createMigrationsTableSQL);
    console.log('📋 Migrations table ready');
  }

  /**
   * List migration files, sorted so they run in alphabetical order.
   * Returns an empty list (and logs) if the directory cannot be read.
   */
  async getMigrationFiles() {
    try {
      const files = await fs.readdir(this.migrationsPath);
      return files
        .filter(file => file.endsWith('.sql'))
        .sort(); // Ensure migrations run in order
    } catch (error) {
      console.error('❌ Error reading migrations directory:', error);
      return [];
    }
  }

  /**
   * Fetch the filenames of migrations already recorded as executed.
   */
  async getExecutedMigrations() {
    try {
      const result = await dbConnection.query('SELECT filename FROM migrations ORDER BY executed_at');
      return result.rows.map(row => row.filename);
    } catch (error) {
      console.error('❌ Error fetching executed migrations:', error);
      return [];
    }
  }

  /**
   * Run every migration file that has not been recorded yet, in order.
   */
  async runMigrations() {
    const migrationFiles = await this.getMigrationFiles();
    const executedMigrations = await this.getExecutedMigrations();
    const pendingMigrations = migrationFiles.filter(
      file => !executedMigrations.includes(file)
    );
    if (pendingMigrations.length === 0) {
      console.log('📝 No pending migrations to run');
      return;
    }
    console.log(`📝 Running ${pendingMigrations.length} pending migrations...`);
    for (const migrationFile of pendingMigrations) {
      await this.runMigration(migrationFile);
    }
  }

  /**
   * Apply a single migration file atomically: the migration SQL and its
   * bookkeeping insert run inside one transaction so a failure leaves no
   * partial state.
   *
   * @param {string} filename - Migration file name within the migrations directory.
   */
  async runMigration(filename) {
    try {
      console.log(`🔄 Running migration: ${filename}`);
      const migrationPath = path.join(this.migrationsPath, filename);
      const migrationSQL = await fs.readFile(migrationPath, 'utf8');
      await dbConnection.transaction(async (client) => {
        await client.query(migrationSQL);
        // Record the migration as executed only if its SQL succeeded.
        await client.query(
          'INSERT INTO migrations (filename) VALUES ($1)',
          [filename]
        );
      });
      console.log(`✅ Migration completed: ${filename}`);
    } catch (error) {
      console.error(`❌ Migration failed: ${filename}`, error);
      throw error;
    }
  }

  /**
   * Seed a test user and sample bookmarks — development only, and only when
   * no users exist yet. Seeding errors are re-thrown only in development.
   */
  async seedData() {
    try {
      if (process.env.NODE_ENV !== 'development') {
        console.log('🌱 Skipping seed data (not in development mode)');
        return;
      }
      const userCount = await dbConnection.query('SELECT COUNT(*) FROM users');
      if (parseInt(userCount.rows[0].count, 10) > 0) {
        console.log('🌱 Skipping seed data (users already exist)');
        return;
      }
      console.log('🌱 Seeding initial data...');
      const bcrypt = require('bcrypt');
      const testPassword = await bcrypt.hash('TestPassword123!', 12);
      // RETURNING id gives us the new user's id without a follow-up SELECT.
      const insertedUser = await dbConnection.query(`
        INSERT INTO users (email, password_hash, is_verified)
        VALUES ($1, $2, $3)
        RETURNING id
      `, ['test@example.com', testPassword, true]);
      const testUserId = insertedUser.rows[0].id;
      const sampleBookmarks = [
        {
          title: 'Google',
          url: 'https://www.google.com',
          folder: 'Search Engines',
          add_date: new Date()
        },
        {
          title: 'GitHub',
          url: 'https://github.com',
          folder: 'Development',
          add_date: new Date()
        },
        {
          title: 'Stack Overflow',
          url: 'https://stackoverflow.com',
          folder: 'Development',
          add_date: new Date()
        }
      ];
      for (const bookmark of sampleBookmarks) {
        await dbConnection.query(`
          INSERT INTO bookmarks (user_id, title, url, folder, add_date)
          VALUES ($1, $2, $3, $4, $5)
        `, [testUserId, bookmark.title, bookmark.url, bookmark.folder, bookmark.add_date]);
      }
      console.log('✅ Seed data created successfully');
      console.log('👤 Test user: test@example.com / TestPassword123!');
    } catch (error) {
      console.error('❌ Seeding failed:', error);
      // Don't throw for seeding failures outside development.
      if (process.env.NODE_ENV === 'development') {
        throw error;
      }
    }
  }

  /**
   * Drop all application tables and re-run initialization.
   * Refuses to run in production. USE WITH CAUTION.
   */
  async reset() {
    if (process.env.NODE_ENV === 'production') {
      throw new Error('Database reset is not allowed in production');
    }
    console.log('⚠️ Resetting database...');
    await dbConnection.query('DROP TABLE IF EXISTS bookmarks CASCADE');
    await dbConnection.query('DROP TABLE IF EXISTS users CASCADE');
    await dbConnection.query('DROP TABLE IF EXISTS migrations CASCADE');
    await dbConnection.query('DROP FUNCTION IF EXISTS update_updated_at_column() CASCADE');
    console.log('🗑️ Database reset completed');
    // Re-initialize
    await this.initialize();
  }

  /**
   * Report connection health plus migration totals (applied vs. pending).
   * Never throws; failures are reported in the returned object.
   */
  async getStatus() {
    try {
      const health = await dbConnection.healthCheck();
      const migrationFiles = await this.getMigrationFiles();
      const executedMigrations = await this.getExecutedMigrations();
      return {
        ...health,
        migrations: {
          total: migrationFiles.length,
          executed: executedMigrations.length,
          pending: migrationFiles.length - executedMigrations.length,
          files: migrationFiles,
          executed_files: executedMigrations
        }
      };
    } catch (error) {
      return {
        healthy: false,
        error: error.message
      };
    }
  }
}

// Create singleton instance
const dbInitializer = new DatabaseInitializer();
module.exports = dbInitializer;

View File

@ -0,0 +1,38 @@
-- Migration: Create users table
-- Description: Creates the users table with all necessary fields for authentication and user management

-- uuid-ossp provides uuid_generate_v4() for UUID primary keys.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    email VARCHAR(255) UNIQUE NOT NULL,
    password_hash VARCHAR(255) NOT NULL,
    is_verified BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    last_login TIMESTAMP WITH TIME ZONE,
    verification_token VARCHAR(255),
    reset_token VARCHAR(255),
    reset_expires TIMESTAMP WITH TIME ZONE
);

-- Create indexes for performance optimization.
-- The token indexes are partial: rows with NULL tokens are skipped, keeping
-- the indexes small since tokens are cleared after use.
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
CREATE INDEX IF NOT EXISTS idx_users_verification_token ON users(verification_token) WHERE verification_token IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_users_reset_token ON users(reset_token) WHERE reset_token IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_users_is_verified ON users(is_verified);
CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at);

-- Trigger function that keeps updated_at current on every row UPDATE.
-- Shared by all tables that have an updated_at column.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Drop-then-create keeps this migration idempotent: plain CREATE TRIGGER
-- would fail if the migration is ever re-run.
DROP TRIGGER IF EXISTS update_users_updated_at ON users;
CREATE TRIGGER update_users_updated_at
    BEFORE UPDATE ON users
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

View File

@ -0,0 +1,34 @@
-- Migration: Create bookmarks table
-- Description: Creates the bookmarks table with user association and all necessary fields
-- Depends on 001_create_users_table.sql (users table, uuid-ossp extension,
-- and the update_updated_at_column() trigger function).

CREATE TABLE IF NOT EXISTS bookmarks (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    -- Cascade delete: removing a user removes all of their bookmarks.
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    title VARCHAR(500) NOT NULL,
    url TEXT NOT NULL,
    folder VARCHAR(255) DEFAULT '',
    add_date TIMESTAMP WITH TIME ZONE NOT NULL,
    last_modified TIMESTAMP WITH TIME ZONE,
    icon TEXT,
    status VARCHAR(20) DEFAULT 'unknown' CHECK (status IN ('unknown', 'valid', 'invalid', 'testing', 'duplicate')),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for performance optimization; most lookups are scoped to a
-- user, so user_id leads each composite index.
CREATE INDEX IF NOT EXISTS idx_bookmarks_user_id ON bookmarks(user_id);
CREATE INDEX IF NOT EXISTS idx_bookmarks_folder ON bookmarks(user_id, folder);
CREATE INDEX IF NOT EXISTS idx_bookmarks_status ON bookmarks(user_id, status);
CREATE INDEX IF NOT EXISTS idx_bookmarks_url ON bookmarks(user_id, url);
CREATE INDEX IF NOT EXISTS idx_bookmarks_title ON bookmarks(user_id, title);
CREATE INDEX IF NOT EXISTS idx_bookmarks_add_date ON bookmarks(user_id, add_date DESC);
CREATE INDEX IF NOT EXISTS idx_bookmarks_created_at ON bookmarks(created_at);

-- Composite index for the common "folder listing, newest first" query.
CREATE INDEX IF NOT EXISTS idx_bookmarks_user_folder_date ON bookmarks(user_id, folder, add_date DESC);

-- Drop-then-create keeps this migration idempotent: plain CREATE TRIGGER
-- would fail if the migration is ever re-run.
DROP TRIGGER IF EXISTS update_bookmarks_updated_at ON bookmarks;
CREATE TRIGGER update_bookmarks_updated_at
    BEFORE UPDATE ON bookmarks
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

View File

@ -0,0 +1,255 @@
const dbConnection = require('./connection');
/**
* Database utility functions for common operations
*/
/**
 * Database utility functions for common operations.
 *
 * All methods are static and run against the shared `dbConnection` pool
 * (required from ./connection at the top of this file). Read-only helpers
 * deliberately catch errors and return a safe default (false / 0 / []) so
 * higher-level callers such as diagnostics and schema validation keep
 * running even when the database is unreachable.
 */
class DatabaseUtils {
  /**
   * Check whether a name is safe to interpolate into SQL as an identifier.
   * Unquoted PostgreSQL identifiers may only contain letters, digits and
   * underscores and must not start with a digit.
   * @param {string} name - Candidate identifier.
   * @returns {boolean} True when the name is a safe plain identifier.
   */
  static #isSafeIdentifier(name) {
    return typeof name === 'string' && /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name);
  }

  /**
   * Check if a table exists in the public schema.
   * @param {string} tableName - Table name to look up (bound as a parameter).
   * @returns {Promise<boolean>} True when the table exists; false on lookup error.
   */
  static async tableExists(tableName) {
    try {
      const result = await dbConnection.query(`
        SELECT EXISTS (
          SELECT FROM information_schema.tables
          WHERE table_schema = 'public'
          AND table_name = $1
        );
      `, [tableName]);
      return result.rows[0].exists;
    } catch (error) {
      console.error(`Error checking if table ${tableName} exists:`, error);
      return false;
    }
  }

  /**
   * Get table row count.
   * @param {string} tableName - Table whose rows are counted.
   * @returns {Promise<number>} Row count, or 0 on error or invalid name.
   */
  static async getTableCount(tableName) {
    // Identifiers cannot be bound as query parameters, so validate the name
    // before interpolating it into the statement to rule out SQL injection.
    if (!DatabaseUtils.#isSafeIdentifier(tableName)) {
      console.error(`Refusing to count rows for invalid table name: ${tableName}`);
      return 0;
    }
    try {
      const result = await dbConnection.query(`SELECT COUNT(*) FROM ${tableName}`);
      // COUNT(*) comes back from pg as a string; parse with explicit radix.
      return Number.parseInt(result.rows[0].count, 10);
    } catch (error) {
      console.error(`Error getting count for table ${tableName}:`, error);
      return 0;
    }
  }

  /**
   * Check database connectivity with detailed diagnostics.
   * @returns {Promise<Object>} Report containing connection health plus
   *   response time, per-table existence/row counts, and a simple-query
   *   timing sample. Never throws; failures are captured in the report.
   */
  static async diagnostics() {
    const diagnostics = {
      timestamp: new Date().toISOString(),
      connection: null,
      tables: {},
      performance: {}
    };
    try {
      // Test basic connectivity and measure round-trip time
      const start = Date.now();
      const health = await dbConnection.healthCheck();
      const connectionTime = Date.now() - start;
      diagnostics.connection = {
        ...health,
        responseTime: `${connectionTime}ms`
      };
      if (health.healthy) {
        // Check table existence and counts for the known application tables
        const tables = ['users', 'bookmarks', 'migrations'];
        for (const table of tables) {
          const exists = await this.tableExists(table);
          const count = exists ? await this.getTableCount(table) : 0;
          diagnostics.tables[table] = {
            exists,
            count
          };
        }
        // Performance sample: time a trivial query through the pool
        const perfStart = Date.now();
        await dbConnection.query('SELECT 1');
        diagnostics.performance.simpleQuery = `${Date.now() - perfStart}ms`;
      }
    } catch (error) {
      diagnostics.connection = {
        healthy: false,
        error: error.message
      };
    }
    return diagnostics;
  }

  /**
   * Validate database schema against the application's expectations.
   * Missing tables/columns are errors; missing recommended indexes are
   * warnings only.
   * @returns {Promise<{valid: boolean, errors: string[], warnings: string[]}>}
   */
  static async validateSchema() {
    const validation = {
      valid: true,
      errors: [],
      warnings: []
    };
    try {
      // Check required tables exist
      const requiredTables = ['users', 'bookmarks'];
      for (const table of requiredTables) {
        const exists = await this.tableExists(table);
        if (!exists) {
          validation.valid = false;
          validation.errors.push(`Required table '${table}' does not exist`);
        }
      }
      // Check users table structure
      if (await this.tableExists('users')) {
        const userColumns = await this.getTableColumns('users');
        const requiredUserColumns = [
          'id', 'email', 'password_hash', 'is_verified',
          'created_at', 'updated_at'
        ];
        for (const column of requiredUserColumns) {
          if (!userColumns.includes(column)) {
            validation.valid = false;
            validation.errors.push(`Required column '${column}' missing from users table`);
          }
        }
      }
      // Check bookmarks table structure
      if (await this.tableExists('bookmarks')) {
        const bookmarkColumns = await this.getTableColumns('bookmarks');
        const requiredBookmarkColumns = [
          'id', 'user_id', 'title', 'url', 'folder',
          'add_date', 'created_at', 'updated_at'
        ];
        for (const column of requiredBookmarkColumns) {
          if (!bookmarkColumns.includes(column)) {
            validation.valid = false;
            validation.errors.push(`Required column '${column}' missing from bookmarks table`);
          }
        }
      }
      // Check recommended indexes exist (missing ones are warnings only)
      const indexes = await this.getIndexes();
      const requiredIndexes = [
        'idx_users_email',
        'idx_bookmarks_user_id'
      ];
      for (const index of requiredIndexes) {
        if (!indexes.includes(index)) {
          validation.warnings.push(`Recommended index '${index}' is missing`);
        }
      }
    } catch (error) {
      validation.valid = false;
      validation.errors.push(`Schema validation failed: ${error.message}`);
    }
    return validation;
  }

  /**
   * Get the column names of a table, in ordinal position order.
   * @param {string} tableName - Table to inspect (bound as a parameter).
   * @returns {Promise<string[]>} Column names, or [] on error.
   */
  static async getTableColumns(tableName) {
    try {
      const result = await dbConnection.query(`
        SELECT column_name
        FROM information_schema.columns
        WHERE table_schema = 'public'
        AND table_name = $1
        ORDER BY ordinal_position;
      `, [tableName]);
      return result.rows.map(row => row.column_name);
    } catch (error) {
      console.error(`Error getting columns for table ${tableName}:`, error);
      return [];
    }
  }

  /**
   * Get all index names in the public schema.
   * @returns {Promise<string[]>} Sorted index names, or [] on error.
   */
  static async getIndexes() {
    try {
      const result = await dbConnection.query(`
        SELECT indexname
        FROM pg_indexes
        WHERE schemaname = 'public'
        ORDER BY indexname;
      `);
      return result.rows.map(row => row.indexname);
    } catch (error) {
      console.error('Error getting database indexes:', error);
      return [];
    }
  }

  /**
   * Clean up expired tokens and stale accounts.
   * Assumes the users table has reset_token/reset_expires columns
   * (created by the users migration) — TODO confirm against that migration.
   * @returns {Promise<void>}
   * @throws Re-throws any database error after logging it.
   */
  static async cleanup() {
    try {
      console.log('🧹 Starting database cleanup...');
      // Clean up expired reset tokens
      const resetResult = await dbConnection.query(`
        UPDATE users
        SET reset_token = NULL, reset_expires = NULL
        WHERE reset_expires < NOW()
      `);
      if (resetResult.rowCount > 0) {
        console.log(`🧹 Cleaned up ${resetResult.rowCount} expired reset tokens`);
      }
      // Clean up old unverified accounts (older than 7 days)
      const unverifiedResult = await dbConnection.query(`
        DELETE FROM users
        WHERE is_verified = FALSE
        AND created_at < NOW() - INTERVAL '7 days'
      `);
      if (unverifiedResult.rowCount > 0) {
        console.log(`🧹 Cleaned up ${unverifiedResult.rowCount} old unverified accounts`);
      }
      console.log('✅ Database cleanup completed');
    } catch (error) {
      console.error('❌ Database cleanup failed:', error);
      throw error;
    }
  }

  /**
   * Backup database (development only).
   * Currently a placeholder that only logs guidance.
   * @throws {Error} When invoked with NODE_ENV=production.
   */
  static async backup() {
    if (process.env.NODE_ENV === 'production') {
      throw new Error('Backup function is not available in production');
    }
    // This would typically use pg_dump or similar
    console.log('💾 Database backup functionality would be implemented here');
    console.log('💡 In production, use proper backup tools like pg_dump');
  }
}
module.exports = DatabaseUtils;

View File

@ -0,0 +1 @@
# This file ensures the middleware directory is tracked by git

View File

@ -0,0 +1 @@
# This file ensures the models directory is tracked by git

View File

@ -0,0 +1 @@
# This file ensures the routes directory is tracked by git

View File

@ -0,0 +1 @@
# This file ensures the services directory is tracked by git

117
backend/test-db-setup.js Normal file
View File

@ -0,0 +1,117 @@
/**
* Test script to verify database setup is working correctly
* This script tests the database connection, schema creation, and basic operations
*/
require('dotenv').config();
const dbConnection = require('./src/database/connection');
const dbInitializer = require('./src/database/init');
const dbUtils = require('./src/database/utils');
/**
 * Run the database setup test suite: connection, initialization, schema
 * validation, basic queries, transactions, and pool statistics.
 * Sets a non-zero exit code when any check fails; always closes the pool.
 */
async function testDatabaseSetup() {
  console.log('🧪 Testing Database Setup...\n');
  let testsPassed = 0;
  let testsTotal = 0;

  // Record one test result. Distinct prefixes make pass/fail
  // distinguishable in the console output.
  function test(name, condition) {
    testsTotal++;
    if (condition) {
      console.log(`✅ ${name}`);
      testsPassed++;
    } else {
      console.log(`❌ ${name}`);
    }
  }

  try {
    // Test 1: Database Connection
    console.log('1. Testing database connection...');
    await dbConnection.connect();
    test('Database connection established', dbConnection.isConnected);

    // Test 2: Health Check
    console.log('\n2. Testing health check...');
    const health = await dbConnection.healthCheck();
    test('Health check returns healthy status', health.healthy);
    test('Connection pool is configured', health.poolSize >= 0);

    // Test 3: Database Initialization
    console.log('\n3. Testing database initialization...');
    await dbInitializer.initialize();
    const status = await dbInitializer.getStatus();
    test('Database initialization completed', status.healthy);
    test('Migrations table exists', status.migrations.total >= 0);

    // Test 4: Schema Validation
    console.log('\n4. Testing schema validation...');
    const validation = await dbUtils.validateSchema();
    test('Schema validation passes', validation.valid);
    test('Required tables exist', validation.errors.length === 0);

    // Test 5: Table Operations
    console.log('\n5. Testing table operations...');
    const usersExist = await dbUtils.tableExists('users');
    const bookmarksExist = await dbUtils.tableExists('bookmarks');
    test('Users table exists', usersExist);
    test('Bookmarks table exists', bookmarksExist);

    // Test 6: Basic Query Operations
    console.log('\n6. Testing query operations...');
    const queryResult = await dbConnection.query('SELECT 1 as test');
    test('Basic query execution works', queryResult.rows[0].test === 1);

    // Test 7: Transaction Support
    console.log('\n7. Testing transaction support...');
    let transactionWorked = false;
    try {
      await dbConnection.transaction(async (client) => {
        await client.query('SELECT 1');
        transactionWorked = true;
      });
    } catch (error) {
      console.error('Transaction test failed:', error);
    }
    test('Transaction support works', transactionWorked);

    // Test 8: Connection Pool Stats
    console.log('\n8. Testing connection pool...');
    const stats = dbConnection.getStats();
    test('Connection pool statistics available', stats.connected);
    test('Pool configuration is correct', stats.config && stats.config.max > 0);

    // Summary
    console.log('\n📊 Test Results:');
    console.log(`✅ Passed: ${testsPassed}/${testsTotal}`);
    console.log(`❌ Failed: ${testsTotal - testsPassed}/${testsTotal}`);
    if (testsPassed === testsTotal) {
      console.log('\n🎉 All database tests passed! Setup is working correctly.');
    } else {
      console.log('\n⚠ Some tests failed. Check the output above for details.');
      // Fail the npm script without skipping the pool cleanup in finally
      process.exitCode = 1;
    }

    // Display diagnostics
    console.log('\n🔍 Database Diagnostics:');
    const diagnostics = await dbUtils.diagnostics();
    console.log(JSON.stringify(diagnostics, null, 2));
  } catch (error) {
    console.error('\n❌ Database test failed:', error.message);
    if (error.code === 'ECONNREFUSED') {
      console.log('\n💡 PostgreSQL is not running. To fix this:');
      console.log('1. Install PostgreSQL if not already installed');
      console.log('2. Start PostgreSQL service');
      console.log('3. Create database: createdb bookmark_manager');
      console.log('4. Update .env file with correct credentials');
    }
    process.exit(1);
  } finally {
    await dbConnection.close();
  }
}
// Run the test suite. Catch any rejection that escapes the function's own
// error handling (e.g. a failure while closing the pool in its finally
// block) so the script never exits on an unhandled promise rejection.
testDatabaseSetup().catch((error) => {
  console.error('❌ Unexpected error during database tests:', error);
  process.exit(1);
});