diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..3df642b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,43 @@ +# Dependabot Configuration +# +# This file configures Dependabot to automatically check for and create PRs +# for dependency updates. It helps keep your project dependencies secure and up-to-date. +# +# Dependabot will: +# - Check for updates daily +# - Create PRs for security updates immediately +# - Group related updates together +# - Use the same package manager (pnpm) as your project + +version: 2 +updates: + # Enable version updates for npm/pnpm packages + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "daily" # Check for updates daily + time: "04:00" # At 4 AM UTC + open-pull-requests-limit: 10 # Maximum number of open PRs + reviewers: + - "frckbrice" # Add your GitHub username here + labels: + - "dependencies" + - "automated" + # Group updates by dependency type + groups: + production-dependencies: + patterns: + - "*" + update-types: + - "minor" + - "patch" + # Ignore specific packages if needed + ignore: + # Example: Ignore major version updates for a specific package + # - dependency-name: "package-name" + # update-types: ["version-update:semver-major"] + + # Commit message preferences + commit-message: + prefix: "chore" + include: "scope" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..c7edeeb --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,52 @@ +# Pull Request + +## Description + + +## Type of Change + +- [ ] 🐛 Bug fix (non-breaking change which fixes an issue) +- [ ] ✨ New feature (non-breaking change which adds functionality) +- [ ] 💥 Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] 📚 Documentation update +- [ ] 🎨 Code style/formatting changes +- [ ] ♻️ Code refactoring +- [ ] ⚡ Performance improvement +- [ ] ✅ Test updates +- [ ] 
🔧 Build/config changes + +## Related Issues + +Closes # +Related to # + +## Changes Made + +- +- +- + +## Testing + +- [ ] Unit tests pass +- [ ] Integration tests pass +- [ ] Manual testing completed +- [ ] Type checking passes +- [ ] Linting passes + +## Checklist + +- [ ] My code follows the project's style guidelines +- [ ] I have performed a self-review of my code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have updated the documentation accordingly +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] Any dependent changes have been merged and published + +## Screenshots (if applicable) + + +## Additional Notes + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..f3f2efd --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,123 @@ +# Continuous Integration Workflow +# +# This workflow runs on every push and pull request to ensure code quality. +# It performs the following checks: +# 1. Type checking (TypeScript compilation without emitting files) +# 2. Linting (ESLint) +# 3. Testing (Jest) +# 4. Building (TypeScript compilation) +# +# The workflow uses pnpm as the package manager and supports multiple Node.js versions. 
+ +name: CI + +# Trigger the workflow on push and pull requests +on: + push: + branches: + - main + - develop + - 'feature/**' + - 'fix/**' + - 'hotfix/**' + - 'release/**' + pull_request: + branches: + - main + - develop + +# Allow only one concurrent workflow per branch +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + # Main CI job that runs all checks + ci: + name: CI Checks + runs-on: ubuntu-latest + + # Strategy to test against multiple Node.js versions + strategy: + matrix: + node-version: [20.x, 22.x] + fail-fast: false + + steps: + # Checkout the repository code + - name: Checkout code + uses: actions/checkout@v4 + + # Setup pnpm package manager + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 8 + + # Setup Node.js with the version from matrix + - name: Setup Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: 'pnpm' + + # Install dependencies + - name: Install dependencies + run: pnpm install --frozen-lockfile + + # Run TypeScript type checking + - name: Type check + run: pnpm check + + # Run ESLint to check code quality + - name: Lint + run: pnpm lint + continue-on-error: false + + # Run tests with Jest + - name: Test + run: pnpm test + env: + NODE_ENV: test + + # Build the TypeScript project + - name: Build + run: pnpm build + + # Upload test coverage reports (optional, for coverage visualization) + - name: Upload coverage reports + if: matrix.node-version == '20.x' + uses: codecov/codecov-action@v4 + with: + file: ./coverage/lcov.info + flags: unittests + name: codecov-umbrella + fail_ci_if_error: false + + # Separate job for security checks (dependencies vulnerability scanning) + security: + name: Security Audit + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 8 + + - name: Setup Node.js + uses: 
actions/setup-node@v4 + with: + node-version: '20.x' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + # Run pnpm audit to check for known vulnerabilities + - name: Run security audit + run: pnpm audit --audit-level=moderate + continue-on-error: true diff --git a/.github/workflows/database.yml b/.github/workflows/database.yml new file mode 100644 index 0000000..0e07177 --- /dev/null +++ b/.github/workflows/database.yml @@ -0,0 +1,69 @@ +# Database Migration Workflow +# +# This workflow handles database migrations and schema checks. +# It can be used to: +# - Validate database schema changes +# - Run migrations in a test environment +# - Generate migration files +# +# Note: This workflow requires database credentials to be set as GitHub secrets. +# Required secrets: +# - DATABASE_URL: PostgreSQL connection string + +name: Database + +# Trigger manually or on specific file changes +on: + workflow_dispatch: # Allows manual triggering + push: + branches: + - main + - develop + paths: + - 'config/database/**' + - 'drizzle/**' + - 'drizzle.config.ts' + +jobs: + # Validate database schema + validate-schema: + name: Validate Schema + runs-on: ubuntu-latest + + # Skip if database URL is not available + if: ${{ secrets.DATABASE_URL != '' }} + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 8 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20.x' + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + # Generate migration files to check for schema changes + - name: Generate migrations + run: pnpm db:generate + env: + DATABASE_URL: ${{ secrets.DATABASE_URL }} + + # Check if there are uncommitted migration files + - name: Check for uncommitted migrations + run: | + if [ -n "$(git status --porcelain drizzle/)" ]; then + echo "⚠️ Uncommitted migration files detected!" 
+ git status + exit 1 + else + echo "✅ All migrations are committed" + fi diff --git a/.github/workflows/dependabot-auto-merge.yml b/.github/workflows/dependabot-auto-merge.yml new file mode 100644 index 0000000..e2bab6f --- /dev/null +++ b/.github/workflows/dependabot-auto-merge.yml @@ -0,0 +1,57 @@ +# Dependabot Auto-Merge Workflow +# +# This workflow automatically merges Dependabot PRs that pass all CI checks. +# It helps keep dependencies up-to-date with minimal manual intervention. +# +# Requirements: +# - Dependabot must be enabled in repository settings +# - Branch protection rules should allow auto-merge + +name: Dependabot Auto-Merge + +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + # Auto-merge Dependabot PRs that pass CI + auto-merge: + name: Auto-merge Dependabot PRs + runs-on: ubuntu-latest + + # Only run for Dependabot PRs + if: github.actor == 'dependabot[bot]' + + steps: + - name: Wait for CI to complete + uses: lewagon/wait-on-check-action@v1.3.4 + with: + ref: ${{ github.event.pull_request.head.sha }} + check-regexp: '^CI' + repo-token: ${{ secrets.GITHUB_TOKEN }} + wait-interval: 10 + allowed-conclusions: success,neutral + + # Approve the PR + - name: Approve PR + uses: actions/github-script@v7 + with: + script: | + github.rest.pulls.createReview({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.issue.number, + event: 'APPROVE' + }) + + # Enable auto-merge + - name: Enable auto-merge + uses: actions/github-script@v7 + with: + script: | + github.rest.pulls.merge({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.issue.number, + merge_method: 'squash' + }) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..3028ba5 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,80 @@ +# Release Workflow +# +# This workflow is triggered when a release is created or when code is pushed to main. 
+# It builds the production-ready code and can be extended to deploy to your hosting platform. +# +# Usage: +# - Create a release tag (e.g., v1.0.0) to trigger this workflow +# - Or push to main branch to build the latest version + +name: Release + +# Trigger on release creation or push to main +on: + release: + types: [created, published] + push: + branches: + - main + tags: + - 'v*' + +# Prevent concurrent runs +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: false + +jobs: + # Build job for production + build: + name: Build Production + runs-on: ubuntu-latest + + steps: + # Checkout the repository code + - name: Checkout code + uses: actions/checkout@v4 + + # Setup pnpm + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 8 + + # Setup Node.js + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20.x' + cache: 'pnpm' + + # Install dependencies + - name: Install dependencies + run: pnpm install --frozen-lockfile --prod=false + + # Run type check + - name: Type check + run: pnpm check + + # Build production bundle + - name: Build production + run: pnpm build:prod + env: + NODE_ENV: production + + # Create build artifact + - name: Upload build artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist/ + retention-days: 30 + + # Optional: Upload to GitHub Releases + - name: Upload to release + if: github.event_name == 'release' + uses: softprops/action-gh-release@v2 + with: + files: dist/** + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..ab96a7c --- /dev/null +++ b/jest.config.js @@ -0,0 +1,28 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + roots: ['/tests', '/src', '/services', '/middlewares', '/config'], + testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'], + transform: { + '^.+\\.ts$': 'ts-jest', + }, + collectCoverageFrom: [ + 'src/**/*.ts', + 
'services/**/*.ts', + 'middlewares/**/*.ts', + 'config/**/*.ts', + '!**/*.d.ts', + '!**/node_modules/**', + '!**/dist/**', + '!**/drizzle/**', + '!**/tests/**', + ], + coverageDirectory: 'coverage', + coverageReporters: ['text', 'lcov', 'html'], + moduleNameMapper: { + '^@/(.*)$': '/src/$1', + }, + setupFilesAfterEnv: ['/tests/setup.ts'], + testTimeout: 10000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], +}; diff --git a/scripts/git-flow.sh b/scripts/git-flow.sh new file mode 100755 index 0000000..2b69d35 --- /dev/null +++ b/scripts/git-flow.sh @@ -0,0 +1,236 @@ +#!/bin/bash + +# Git Flow Helper Script +# This script provides helper functions for the Git Flow branching model +# +# Git Flow Branching Model: +# - main: Production-ready code (always deployable) +# - develop: Integration branch for features (default development branch) +# - feature/*: New features (branched from develop, merged back to develop) +# - release/*: Release preparation (branched from develop, merged to main and develop) +# - hotfix/*: Critical production fixes (branched from main, merged to main and develop) +# +# Usage examples: +# ./scripts/git-flow.sh feature start my-feature +# ./scripts/git-flow.sh feature finish my-feature +# ./scripts/git-flow.sh release start 1.0.0 +# ./scripts/git-flow.sh hotfix start critical-bug + +set -e + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Function to print colored messages +print_info() { + echo -e "${GREEN}[INFO]${NC} $1" +} + +print_warn() { + echo -e "${YELLOW}[WARN]${NC} $1" +} + +print_error() { + echo -e "${RED}[ERROR]${NC} $1" +} + +# Ensure we're in a git repository +if ! git rev-parse --git-dir > /dev/null 2>&1; then + print_error "Not a git repository" + exit 1 +fi + +# Ensure develop branch exists +ensure_develop() { + if ! git show-ref --verify --quiet refs/heads/develop; then + print_warn "develop branch does not exist. Creating it from main..." 
+ git checkout -b develop main + print_info "Created develop branch" + fi +} + +# Start a feature branch +# Usage: feature start +feature_start() { + local feature_name=$1 + if [ -z "$feature_name" ]; then + print_error "Feature name is required" + echo "Usage: $0 feature start " + exit 1 + fi + + ensure_develop + git checkout develop + git pull origin develop 2>/dev/null || true + git checkout -b "feature/$feature_name" develop + print_info "Created and switched to feature/$feature_name" +} + +# Finish a feature branch +# Usage: feature finish +feature_finish() { + local feature_name=$1 + if [ -z "$feature_name" ]; then + print_error "Feature name is required" + echo "Usage: $0 feature finish " + exit 1 + fi + + local branch="feature/$feature_name" + if ! git show-ref --verify --quiet refs/heads/$branch; then + print_error "Branch $branch does not exist" + exit 1 + fi + + git checkout $branch + git checkout develop + git merge --no-ff $branch -m "Merge feature/$feature_name into develop" + git branch -d $branch + print_info "Merged and deleted $branch" +} + +# Start a release branch +# Usage: release start +release_start() { + local version=$1 + if [ -z "$version" ]; then + print_error "Version is required" + echo "Usage: $0 release start " + exit 1 + fi + + ensure_develop + git checkout develop + git pull origin develop 2>/dev/null || true + git checkout -b "release/$version" develop + print_info "Created and switched to release/$version" +} + +# Finish a release branch +# Usage: release finish +release_finish() { + local version=$1 + if [ -z "$version" ]; then + print_error "Version is required" + echo "Usage: $0 release finish " + exit 1 + fi + + local branch="release/$version" + if ! 
git show-ref --verify --quiet refs/heads/$branch; then + print_error "Branch $branch does not exist" + exit 1 + fi + + git checkout $branch + git checkout main + git merge --no-ff $branch -m "Release $version" + git tag -a "v$version" -m "Release version $version" + + git checkout develop + git merge --no-ff $branch -m "Merge release/$version into develop" + + git branch -d $branch + print_info "Merged release/$version to main and develop, created tag v$version" +} + +# Start a hotfix branch +# Usage: hotfix start +hotfix_start() { + local hotfix_name=$1 + if [ -z "$hotfix_name" ]; then + print_error "Hotfix name is required" + echo "Usage: $0 hotfix start " + exit 1 + fi + + git checkout main + git pull origin main 2>/dev/null || true + git checkout -b "hotfix/$hotfix_name" main + print_info "Created and switched to hotfix/$hotfix_name" +} + +# Finish a hotfix branch +# Usage: hotfix finish +hotfix_finish() { + local hotfix_name=$1 + if [ -z "$hotfix_name" ]; then + print_error "Hotfix name is required" + echo "Usage: $0 hotfix finish " + exit 1 + fi + + local branch="hotfix/$hotfix_name" + if ! 
git show-ref --verify --quiet refs/heads/$branch; then + print_error "Branch $branch does not exist" + exit 1 + fi + + git checkout $branch + git checkout main + git merge --no-ff $branch -m "Hotfix $hotfix_name" + + ensure_develop + git checkout develop + git merge --no-ff $branch -m "Merge hotfix/$hotfix_name into develop" + + git branch -d $branch + print_info "Merged hotfix/$hotfix_name to main and develop" +} + +# Main command handler +case "$1" in + feature) + case "$2" in + start) + feature_start "$3" + ;; + finish) + feature_finish "$3" + ;; + *) + print_error "Unknown feature command: $2" + echo "Usage: $0 feature {start|finish} " + exit 1 + ;; + esac + ;; + release) + case "$2" in + start) + release_start "$3" + ;; + finish) + release_finish "$3" + ;; + *) + print_error "Unknown release command: $2" + echo "Usage: $0 release {start|finish} " + exit 1 + ;; + esac + ;; + hotfix) + case "$2" in + start) + hotfix_start "$3" + ;; + finish) + hotfix_finish "$3" + ;; + *) + print_error "Unknown hotfix command: $2" + echo "Usage: $0 hotfix {start|finish} " + exit 1 + ;; + esac + ;; + *) + print_error "Unknown command: $1" + echo "Usage: $0 {feature|release|hotfix} {start|finish} " + exit 1 + ;; +esac diff --git a/scripts/git-flow.ts b/scripts/git-flow.ts new file mode 100644 index 0000000..7d15c2c --- /dev/null +++ b/scripts/git-flow.ts @@ -0,0 +1,206 @@ +/** + * Git Flow Helper - TypeScript Version + * + * This module provides TypeScript utilities for working with the Git Flow branching model. 
+ * + * Git Flow Branching Model Overview: + * ================================== + * + * Branch Types: + * - main: Production-ready code (always deployable, protected) + * - develop: Integration branch for features (default development branch) + * - feature/*: New features (branched from develop, merged back to develop) + * - release/*: Release preparation (branched from develop, merged to main and develop) + * - hotfix/*: Critical production fixes (branched from main, merged to main and develop) + * + * Workflow: + * 1. Features: develop -> feature/name -> develop + * 2. Releases: develop -> release/version -> main + develop + * 3. Hotfixes: main -> hotfix/name -> main + develop + * + * Usage: + * - Use the shell script (scripts/git-flow.sh) for command-line operations + * - This TypeScript file provides programmatic access if needed + */ + +import { execSync } from 'child_process'; + +/** + * Git Flow Branch Types + */ +export enum BranchType { + FEATURE = 'feature', + RELEASE = 'release', + HOTFIX = 'hotfix', + FIX = 'fix', + CHORE = 'chore', + DOCS = 'docs', + TEST = 'test', + REFACTOR = 'refactor', + PERF = 'perf', + BUILD = 'build', + CI = 'ci', + CD = 'cd', + VENDOR = 'vendor', +} + +/** + * Git Flow Operations + */ +export enum GitFlowOperation { + START = 'start', + FINISH = 'finish', +} + +/** + * Execute a git command and return the output + */ +function execGit(command: string): string { + try { + return execSync(command, { encoding: 'utf-8', stdio: 'pipe' }).trim(); + } catch (error) { + throw new Error(`Git command failed: ${command}`); + } +} + +/** + * Check if a branch exists + */ +export function branchExists(branchName: string): boolean { + try { + execGit(`git show-ref --verify --quiet refs/heads/${branchName}`); + return true; + } catch { + return false; + } +} + +/** + * Get current branch name + */ +export function getCurrentBranch(): string { + return execGit('git rev-parse --abbrev-ref HEAD'); +} + +/** + * Ensure develop branch exists, 
create if it doesn't + */ +export function ensureDevelopBranch(): void { + if (!branchExists('develop')) { + console.log('Creating develop branch from main...'); + execGit('git checkout -b develop main'); + console.log('✓ Created develop branch'); + } +} + +/** + * Start a feature branch + * @param featureName - Name of the feature (without 'feature/' prefix) + */ +export function startFeature(featureName: string): void { + ensureDevelopBranch(); + const branchName = `feature/${featureName}`; + + if (branchExists(branchName)) { + throw new Error(`Feature branch ${branchName} already exists`); + } + + execGit('git checkout develop'); + execGit(`git checkout -b ${branchName} develop`); + console.log(`✓ Created and switched to ${branchName}`); +} + +/** + * Finish a feature branch (merge to develop and delete) + * @param featureName - Name of the feature (without 'feature/' prefix) + */ +export function finishFeature(featureName: string): void { + const branchName = `feature/${featureName}`; + + if (!branchExists(branchName)) { + throw new Error(`Feature branch ${branchName} does not exist`); + } + + execGit(`git checkout ${branchName}`); + execGit('git checkout develop'); + execGit(`git merge --no-ff ${branchName} -m "Merge ${branchName} into develop"`); + execGit(`git branch -d ${branchName}`); + console.log(`✓ Merged and deleted ${branchName}`); +} + +/** + * Start a release branch + * @param version - Version number (e.g., '1.0.0') + */ +export function startRelease(version: string): void { + ensureDevelopBranch(); + const branchName = `release/${version}`; + + if (branchExists(branchName)) { + throw new Error(`Release branch ${branchName} already exists`); + } + + execGit('git checkout develop'); + execGit(`git checkout -b ${branchName} develop`); + console.log(`✓ Created and switched to ${branchName}`); +} + +/** + * Finish a release branch (merge to main and develop, create tag) + * @param version - Version number (e.g., '1.0.0') + */ +export function 
finishRelease(version: string): void { + const branchName = `release/${version}`; + + if (!branchExists(branchName)) { + throw new Error(`Release branch ${branchName} does not exist`); + } + + execGit(`git checkout ${branchName}`); + execGit('git checkout main'); + execGit(`git merge --no-ff ${branchName} -m "Release ${version}"`); + execGit(`git tag -a v${version} -m "Release version ${version}"`); + + execGit('git checkout develop'); + execGit(`git merge --no-ff ${branchName} -m "Merge release/${version} into develop"`); + execGit(`git branch -d ${branchName}`); + console.log(`✓ Merged release/${version} to main and develop, created tag v${version}`); +} + +/** + * Start a hotfix branch + * @param hotfixName - Name of the hotfix (without 'hotfix/' prefix) + */ +export function startHotfix(hotfixName: string): void { + const branchName = `hotfix/${hotfixName}`; + + if (branchExists(branchName)) { + throw new Error(`Hotfix branch ${branchName} already exists`); + } + + execGit('git checkout main'); + execGit(`git checkout -b ${branchName} main`); + console.log(`✓ Created and switched to ${branchName}`); +} + +/** + * Finish a hotfix branch (merge to main and develop) + * @param hotfixName - Name of the hotfix (without 'hotfix/' prefix) + */ +export function finishHotfix(hotfixName: string): void { + const branchName = `hotfix/${hotfixName}`; + + if (!branchExists(branchName)) { + throw new Error(`Hotfix branch ${branchName} does not exist`); + } + + execGit(`git checkout ${branchName}`); + execGit('git checkout main'); + execGit(`git merge --no-ff ${branchName} -m "Hotfix ${hotfixName}"`); + + ensureDevelopBranch(); + execGit('git checkout develop'); + execGit(`git merge --no-ff ${branchName} -m "Merge hotfix/${hotfixName} into develop"`); + execGit(`git branch -d ${branchName}`); + console.log(`✓ Merged hotfix/${hotfixName} to main and develop`); +} diff --git a/src/config/env.ts b/src/config/env.ts new file mode 100644 index 0000000..63111d1 --- /dev/null +++ 
b/src/config/env.ts @@ -0,0 +1,72 @@ +/** + * Environment Configuration + * + * Validates and provides type-safe access to environment variables. + * All environment variables are validated on application startup using Zod. + * + * Usage: + * import { env } from './config/env'; + * const port = env.PORT; + * const isProduction = env.NODE_ENV === 'production'; + * + * Benefits: + * - Type-safe environment variable access + * - Validation on startup (fails fast if invalid) + * - Default values for optional variables + * - Clear error messages for missing/invalid variables + */ +import { z } from 'zod'; +import dotenv from 'dotenv'; + +dotenv.config(); + +/** + * Environment variable schema with validation rules + * - Required variables will cause startup failure if missing + * - Optional variables have defaults or are marked optional + * - Type transformations (e.g., PORT to number) are handled automatically + */ +const envSchema = z.object({ + NODE_ENV: z.enum(['development', 'production', 'test']).default('development'), + PORT: z.string().transform(Number).default('5500'), + + // Database - at least one must be provided + DATABASE_URL: z.string().url().optional(), + DATAAPI_URL: z.string().url().optional(), + + // Session - required for security + SESSION_SECRET: z.string().min(32, 'SESSION_SECRET must be at least 32 characters'), + + // Cloudinary - optional (only needed for file uploads) + CLOUDINARY_CLOUD_NAME: z.string().optional(), + CLOUDINARY_API_KEY: z.string().optional(), + CLOUDINARY_API_SECRET: z.string().optional(), + + // Email - optional (only needed for email functionality) + GMAIL_USER: z.string().email().optional(), + GMAIL_PASS: z.string().optional(), + + // CORS - defaults to common development origins + ALLOWED_ORIGINS: z.string().default('http://localhost:3000,http://localhost:3001'), +}); + +type Env = z.infer; + +/** + * Validates environment variables on startup + * Throws an error with detailed messages if validation fails + */ +function 
validateEnv(): Env { + try { + return envSchema.parse(process.env); + } catch (error) { + if (error instanceof z.ZodError) { + const missingVars = error.errors.map(e => `${e.path.join('.')}: ${e.message}`).join('\n'); + throw new Error(`Environment validation failed:\n${missingVars}`); + } + throw error; + } +} + +// Validated and type-safe environment variables +export const env = validateEnv(); diff --git a/src/controllers/auth.controller.ts b/src/controllers/auth.controller.ts new file mode 100644 index 0000000..fed7fe3 --- /dev/null +++ b/src/controllers/auth.controller.ts @@ -0,0 +1,80 @@ +import { Request, Response } from 'express'; +import { compare } from 'bcrypt'; +import drizzleService from '../../services/drizzle-services'; +import { AuthenticationError } from '../utils/errors'; +import { logger } from '../middlewares/logger'; + +export class AuthController { + async login(req: Request, res: Response): Promise { + const { username, password } = req.body; + + const user = await drizzleService.getUserByUsername(username); + + if (!user) { + throw new AuthenticationError('Invalid username or password'); + } + + const passwordMatch = await compare(password, user.password); + + if (!passwordMatch) { + throw new AuthenticationError('Invalid username or password'); + } + + // Create session data + req.session.user = { + id: user.id as string, + username: user.username, + fullName: user.fullName, + email: user.email, + role: user.role, + libraryId: String(user.libraryId), + }; + + logger.info('User logged in', { userId: user.id, username: user.username }); + + res.status(200).json({ + success: true, + data: { + id: user.id, + username: user.username, + fullName: user.fullName, + email: user.email, + role: user.role, + libraryId: user.libraryId, + }, + }); + } + + async getSession(req: Request, res: Response): Promise { + if (req.session.user) { + res.status(200).json({ + success: true, + data: req.session.user, + }); + } else { + res.status(200).json({ + 
success: true, + data: null, + }); + } + } + + async logout(req: Request, res: Response): Promise { + return new Promise((resolve, reject) => { + req.session.destroy((err) => { + if (err) { + reject(err); + return; + } + res.clearCookie('connect.sid'); + res.status(200).json({ + success: true, + message: 'Logged out successfully', + }); + resolve(); + }); + }); + } +} + +export const authController = new AuthController(); diff --git a/src/middlewares/auth.ts b/src/middlewares/auth.ts new file mode 100644 index 0000000..3abf148 --- /dev/null +++ b/src/middlewares/auth.ts @@ -0,0 +1,26 @@ +import { Request, Response, NextFunction } from 'express'; +import { AuthenticationError, AuthorizationError } from '../utils/errors'; + +export const requireAuth = (req: Request, res: Response, next: NextFunction) => { + if (!req.session?.user) { + throw new AuthenticationError('Authentication required'); + } + next(); +}; + +export const requireRole = (...roles: string[]) => { + return (req: Request, res: Response, next: NextFunction) => { + if (!req.session?.user) { + throw new AuthenticationError('Authentication required'); + } + + if (!roles.includes(req.session.user.role)) { + throw new AuthorizationError(`Access denied. 
Required roles: ${roles.join(', ')}`); + } + + next(); + }; +}; + +export const requireSuperAdmin = requireRole('super_admin'); +export const requireLibraryAdmin = requireRole('library_admin', 'super_admin'); diff --git a/src/middlewares/error-handler.ts b/src/middlewares/error-handler.ts new file mode 100644 index 0000000..a8e48be --- /dev/null +++ b/src/middlewares/error-handler.ts @@ -0,0 +1,72 @@ +import { Request, Response, NextFunction } from 'express'; +import { ZodError } from 'zod'; +import { AppError, ValidationError } from '../utils/errors'; +import { env } from '../config/env'; +import { logger } from './logger'; + +export const errorHandler = ( + err: Error | AppError | ZodError, + req: Request, + res: Response, + next: NextFunction +): void => { + // Log error + logger.error('Error occurred', { + error: err.message, + stack: err.stack, + method: req.method, + path: req.path, + ip: req.ip, + userAgent: req.get('user-agent'), + }); + + // Handle Zod validation errors + if (err instanceof ZodError) { + const formattedErrors = err.errors.reduce((acc, error) => { + const path = error.path.join('.'); + if (!acc[path]) { + acc[path] = []; + } + acc[path].push(error.message); + return acc; + }, {} as Record); + + res.status(400).json({ + success: false, + error: 'Validation failed', + errors: formattedErrors, + timestamp: new Date().toISOString(), + }); + return; + } + + // Handle custom AppError + if (err instanceof AppError) { + const response: any = { + success: false, + error: err.message, + code: err.code, + timestamp: new Date().toISOString(), + }; + + if (err instanceof ValidationError && err.errors) { + response.errors = err.errors; + } + + if (env.NODE_ENV === 'development') { + response.stack = err.stack; + } + + res.status(err.statusCode).json(response); + return; + } + + // Handle unexpected errors + res.status(500).json({ + success: false, + error: 'Internal server error', + message: env.NODE_ENV === 'development' ? 
err.message : 'An unexpected error occurred', + timestamp: new Date().toISOString(), + ...(env.NODE_ENV === 'development' && { stack: err.stack }), + }); +}; diff --git a/src/middlewares/logger.ts b/src/middlewares/logger.ts new file mode 100644 index 0000000..2d72869 --- /dev/null +++ b/src/middlewares/logger.ts @@ -0,0 +1,67 @@ +import { createLogger, format, transports } from 'winston'; +import { env } from '../config/env'; + +const logFormat = format.combine( + format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), + format.errors({ stack: true }), + format.splat(), + format.json() +); + +const consoleFormat = format.combine( + format.colorize(), + format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), + format.printf((info: any) => { + const { timestamp, level, message, ...meta } = info; + const metaStr = Object.keys(meta).length ? JSON.stringify(meta, null, 2) : ''; + return `${timestamp} [${level}]: ${message} ${metaStr}`; + }) +); + +export const logger = createLogger({ + level: env.NODE_ENV === 'production' ? 
'info' : 'debug', + format: logFormat, + defaultMeta: { service: 'library-backend' }, + transports: [ + new transports.File({ filename: 'logs/error.log', level: 'error' }), + new transports.File({ filename: 'logs/combined.log' }), + ], +}); + +if (env.NODE_ENV !== 'production') { + logger.add( + new transports.Console({ + format: consoleFormat, + }) + ); +} + +// Request logging middleware +export const requestLogger = (req: any, res: any, next: any) => { + const start = Date.now(); + const path = req.path; + let capturedJsonResponse: Record | undefined = undefined; + + const originalResJson = res.json; + res.json = function (bodyJson: any, ...args: any[]) { + capturedJsonResponse = bodyJson; + return originalResJson.apply(res, [bodyJson, ...args]); + }; + + res.on('finish', () => { + const duration = Date.now() - start; + if (path.startsWith('/api')) { + logger.info('HTTP Request', { + method: req.method, + path, + statusCode: res.statusCode, + duration: `${duration}ms`, + ip: req.ip, + userAgent: req.get('user-agent'), + ...(capturedJsonResponse && { response: capturedJsonResponse }), + }); + } + }); + + next(); +}; diff --git a/src/middlewares/validation.ts b/src/middlewares/validation.ts new file mode 100644 index 0000000..6caae42 --- /dev/null +++ b/src/middlewares/validation.ts @@ -0,0 +1,72 @@ +import { Request, Response, NextFunction } from 'express'; +import { ZodSchema, ZodError } from 'zod'; +import { ValidationError } from '../utils/errors'; + +export const validate = (schema: ZodSchema) => { + return (req: Request, res: Response, next: NextFunction) => { + try { + schema.parse(req.body); + next(); + } catch (error) { + if (error instanceof ZodError) { + const formattedErrors = error.errors.reduce((acc, err) => { + const path = err.path.join('.'); + if (!acc[path]) { + acc[path] = []; + } + acc[path].push(err.message); + return acc; + }, {} as Record); + + throw new ValidationError('Validation failed', formattedErrors); + } + next(error); + } + }; +}; + 
+export const validateQuery = (schema: ZodSchema) => { + return (req: Request, res: Response, next: NextFunction) => { + try { + schema.parse(req.query); + next(); + } catch (error) { + if (error instanceof ZodError) { + const formattedErrors = error.errors.reduce((acc, err) => { + const path = err.path.join('.'); + if (!acc[path]) { + acc[path] = []; + } + acc[path].push(err.message); + return acc; + }, {} as Record<string, string[]>); + + throw new ValidationError('Query validation failed', formattedErrors); + } + next(error); + } + }; +}; + +export const validateParams = (schema: ZodSchema) => { + return (req: Request, res: Response, next: NextFunction) => { + try { + schema.parse(req.params); + next(); + } catch (error) { + if (error instanceof ZodError) { + const formattedErrors = error.errors.reduce((acc, err) => { + const path = err.path.join('.'); + if (!acc[path]) { + acc[path] = []; + } + acc[path].push(err.message); + return acc; + }, {} as Record<string, string[]>); + + throw new ValidationError('Parameter validation failed', formattedErrors); + } + next(error); + } + }; +}; diff --git a/src/routes/admin.routes.ts b/src/routes/admin.routes.ts new file mode 100644 index 0000000..3d1f1ad --- /dev/null +++ b/src/routes/admin.routes.ts @@ -0,0 +1,176 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; +import { apiHandler } from "./shared"; + +export function registerAdminRoutes(app: Express, global_path: string) { + // Analytics endpoints + app.get(`${global_path}/admin/dashboard/stats`, async (req, res) => { + try { + const libraryId = req.session.user?.libraryId; + if (!libraryId) { + return res.status(400).json({ error: 'Library ID required' }); + } + + const stories = await drizzleService.getStories({ libraryId }); + const mediaItems = await drizzleService.getMediaItems({ libraryId }); + const events = await drizzleService.getEvents({ libraryId }); + const messages = await drizzleService.getContactMessages({ libraryId }); + + const 
stats = { + totalStories: stories.length, + publishedStories: stories.filter(s => s.isPublished).length, + totalMedia: mediaItems.length, + approvedMedia: mediaItems.filter(m => m.isApproved).length, + totalEvents: events.length, + upcomingEvents: events.filter(e => new Date(e.eventDate) > new Date()).length, + totalMessages: messages.length, + unreadMessages: messages.filter(m => !m.isRead).length + }; + + return res.status(200).json(stats); + } catch (error) { + console.error("Error fetching dashboard stats:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.get(`${global_path}/admin/dashboard/analytics`, async (req, res) => { + try { + const libraryId = req.session.user?.libraryId; + if (!libraryId) { + return res.status(400).json({ error: 'Library ID required' }); + } + + const analytics = await drizzleService.getAnalytics({ libraryId }); + + // Process analytics data for charts + const last30Days = Array.from({ length: 30 }, (_, i) => { + const date = new Date(); + date.setDate(date.getDate() - i); + return date.toISOString().split('T')[0]; + }).reverse(); + + const visitorData = last30Days.map(date => { + const dayAnalytics = analytics.filter(a => + a.date && new Date(a.date).toISOString().split('T')[0] === date + ); + const totalViews = dayAnalytics.reduce((sum, a) => sum + (a.views || 0), 0); + + return { + date: new Date(date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' }), + visitors: totalViews, + uniqueVisitors: Math.floor(totalViews * 0.7) // Approximate unique visitors + }; + }); + + const contentData = [ + { name: 'Stories', views: analytics.filter(a => a.storyId).reduce((sum, a) => sum + (a.views || 0), 0), engagement: 75 }, + { name: 'Gallery', views: analytics.filter(a => a.pageType === 'gallery').reduce((sum, a) => sum + (a.views || 0), 0), engagement: 85 }, + { name: 'Library Profile', views: analytics.filter(a => a.pageType === 'library_profile').reduce((sum, a) => sum + (a.views || 
0), 0), engagement: 65 } + ]; + + const engagementData = last30Days.slice(-7).map(date => ({ + date: new Date(date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' }), + avgTimeSpent: Math.floor(Math.random() * 300) + 120, // Mock data for demo + interactionRate: Math.floor(Math.random() * 40) + 60 + })); + + const topPerformers = { + topStory: 'Featured Exhibition', + topStoryViews: Math.max(...analytics.filter(a => a.storyId).map(a => a.views || 0), 0), + topGallery: 'Main Collection', + topGalleryViews: Math.max(...analytics.filter(a => a.pageType === 'gallery').map(a => a.views || 0), 0), + avgTimeOnPage: '4:32', + avgTimeIncrease: 12 + }; + + return res.status(200).json({ + visitorData, + contentData, + engagementData, + topPerformers + }); + } catch (error) { + console.error("Error fetching analytics:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.get(`${global_path}/admin/dashboard/activity`, async (req, res) => { + try { + const libraryId = req.session.user?.libraryId; + if (!libraryId) { + return res.status(400).json({ error: 'Library ID required' }); + } + + const stories = await drizzleService.getStories({ libraryId, limit: 5 }); + const messages = await drizzleService.getContactMessages({ libraryId, limit: 5 }); + const events = await drizzleService.getEvents({ libraryId, limit: 5 }); + + const recentActivity = [ + ...stories.map(s => ({ + type: 'story', + title: `Story updated: ${s.title}`, + timestamp: s.updatedAt || s.createdAt, + status: s.isPublished ? 'published' : 'draft' + })), + ...messages.map(m => ({ + type: 'message', + title: `New inquiry: ${m.subject}`, + timestamp: m.createdAt, + status: m.isRead ? 'read' : 'unread' + })), + ...events.map(e => ({ + type: 'event', + title: `Event: ${e.title}`, + timestamp: e.createdAt, + status: e.isPublished ? 'published' : 'draft' + })) + ].sort((a, b) => + new Date(b.timestamp ?? 0).getTime() - new Date(a.timestamp ?? 
0).getTime() + ).slice(0, 10); + + return res.status(200).json(recentActivity); + } catch (error) { + console.error("Error fetching recent activity:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Admin: Get all galleries + app.get(`${global_path}/admin/galleries`, apiHandler(async (req, res) => { + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + const galleries = await drizzleService.getGalleries(); + return res.status(200).json(galleries); + })); + + // Delete Image Route + app.delete(global_path + '/admin/upload/image/:publicId', apiHandler(async (req, res) => { + const { publicId } = req.params; + const { cloudinaryService } = await import("../../config/bucket-storage/cloudinary"); + + if (!cloudinaryService.isReady()) { + return res.status(503).json({ error: 'Cloudinary not configured' }); + } + + try { + // Decode the public ID (it may be URL encoded) + const decodedPublicId = decodeURIComponent(publicId); + const success = await cloudinaryService.deleteImage(decodedPublicId); + + if (success) { + return res.status(200).json({ success: true, message: 'Image deleted successfully' }); + } else { + return res.status(404).json({ error: 'Image not found or already deleted' }); + } + } catch (error) { + console.error("Image deletion error:", error); + return res.status(500).json({ + error: 'Failed to delete image', + message: error instanceof Error ? 
error.message : String(error) + }); + } + })); +} diff --git a/src/routes/auth.routes.ts b/src/routes/auth.routes.ts new file mode 100644 index 0000000..9925f4b --- /dev/null +++ b/src/routes/auth.routes.ts @@ -0,0 +1,76 @@ +import type { Express, Request, Response, NextFunction } from "express"; +import { compare } from "bcrypt"; +import drizzleService from "../../services/drizzle-services"; +import { validate } from "../../utils/validations"; +import { loginSchema } from "../validations/auth.schemas"; +import { AuthenticationError } from "../utils/errors"; +import { authLimiter } from '../../middlewares/rate-limiters'; + +export function registerAuthRoutes(app: Express, global_path: string) { + // Authentication routes + app.post(`${global_path}/auth/login`, authLimiter, validate(loginSchema), async (req, res, next) => { + try { + const { username, password } = req.body; + + const user = await drizzleService.getUserByUsername(username); + + if (!user) { + throw new AuthenticationError('Invalid username or password'); + } + + // Compare password using bcrypt + const passwordMatch = await compare(password, user.password); + + if (!passwordMatch) { + throw new AuthenticationError('Invalid username or password'); + } + + // Create session data + req.session.user = { + id: user.id as string, + username: user.username, + fullName: user.fullName, + email: user.email, + role: user.role, + libraryId: String(user.libraryId) + }; + + return res.status(200).json({ + success: true, + data: { + id: user.id, + username: user.username, + fullName: user.fullName, + email: user.email, + role: user.role, + libraryId: user.libraryId + } + }); + } catch (error) { + next(error); + } + }); + + app.get(`${global_path}/auth/session`, (req, res) => { + if (req.session.user) { + return res.status(200).json({ + success: true, + data: req.session.user + }); + } + return res.status(200).json({ + success: true, + data: null + }); + }); + + app.post(`${global_path}/auth/logout`, (req, res, 
next) => { + req.session.destroy((err) => { + if (err) { + return next(err); + } + res.clearCookie('connect.sid'); + return res.status(200).json({ success: true, message: 'Logged out successfully' }); + }); + }); +} diff --git a/src/routes/contact.routes.ts b/src/routes/contact.routes.ts new file mode 100644 index 0000000..ef1f41a --- /dev/null +++ b/src/routes/contact.routes.ts @@ -0,0 +1,109 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; +import { sendResponseEmail } from "../../services/email-service"; +import { contactLimiter, emailLimiter } from '../../middlewares/rate-limiters'; +import { jsonApiMiddleware, apiHandler } from "./shared"; + +export function registerContactRoutes(app: Express, global_path: string) { + // Contact messages endpoints + app.get(`${global_path}/contact-messages`, async (req, res) => { + try { + const libraryId = req.session.user?.libraryId; + const options = libraryId ? { libraryId } : {}; + + const messages = await drizzleService.getContactMessages(options); + return res.status(200).json(messages); + } catch (error) { + console.error("Error fetching contact messages:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.post(`${global_path}/contact-messages`, contactLimiter, async (req, res) => { + try { + const message = await drizzleService.createContactMessage(req.body); + return res.status(201).json(message); + } catch (error) { + console.error("Error creating contact message:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.patch(`${global_path}/contact-messages/:id`, async (req, res) => { + try { + const messageId = req.params.id; + const updatedMessage = await drizzleService.updateContactMessage(messageId, req.body); + + if (!updatedMessage) { + return res.status(404).json({ error: 'Contact message not found' }); + } + + return res.status(200).json(updatedMessage); + } catch 
(error) { + console.error("Error updating contact message:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Reply to a contact message + app.post(`${global_path}/contact-messages/:id/reply`, emailLimiter, jsonApiMiddleware, apiHandler(async (req, res) => { + if (!req.session.user || req.session.user.role !== 'library_admin') { + return res.status(403).json({ error: "Unauthorized" }); + } + + const messageId = req.params.id; + const { subject, message } = req.body; + + if (!subject || !message) { + return res.status(400).json({ error: "Subject and message are required" }); + } + + // Get the original message + const originalMessage = await drizzleService.getContactMessage(messageId); + if (!originalMessage || originalMessage.libraryId !== req.session.user.libraryId) { + return res.status(404).json({ error: "Message not found" }); + } + + // Get library information + const library = await drizzleService.getLibrary(req.session.user.libraryId!); + if (!library) { + return res.status(404).json({ error: "Library not found" }); + } + + try { + // Send email response to visitor + const emailSent = await sendResponseEmail({ + visitorEmail: originalMessage.email, + visitorName: originalMessage.name, + originalSubject: originalMessage.subject, + responseSubject: subject, + responseMessage: message, + libraryName: library.name, + libraryEmail: "noreply@library.com" + }); + + if (!emailSent) { + return res.status(500).json({ error: "Failed to send email response" }); + } + + // Create message response record + const response = await drizzleService.createMessageResponse({ + contactMessageId: messageId, + respondedBy: req.session.user.id, + subject, + message + }); + + // Update contact message status + await drizzleService.updateContactMessage(messageId, { + responseStatus: 'responded', + isRead: true + }); + + res.json(response); + } catch (error) { + console.error('Error sending reply:', error); + res.status(500).json({ error: 
"Failed to send reply" }); + } + })); +} diff --git a/src/routes/events.routes.ts b/src/routes/events.routes.ts new file mode 100644 index 0000000..a1e725a --- /dev/null +++ b/src/routes/events.routes.ts @@ -0,0 +1,105 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; +import { upload, apiHandler, uploadImageToCloudinary } from "./shared"; + +export function registerEventsRoutes(app: Express, global_path: string) { + app.post(`${global_path}/events`, upload.single('eventImage'), apiHandler(async (req, res) => { + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + + const libraryId = req.session.user.libraryId; + if (!libraryId) { + return res.status(400).json({ error: 'Library ID required' }); + } + + // Handle event image upload + let imageUrl = req.body.imageUrl || null; + if (req.file) { + try { + imageUrl = await uploadImageToCloudinary(req.file, 'events'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload event image' }); + } + } + + const eventData = { + ...req.body, + libraryId, + imageUrl, + isApproved: false, // New events need approval + createdAt: new Date() + }; + + const event = await drizzleService.createEvent(eventData); + return res.status(201).json(event); + })); + + // Update event with image upload + app.patch(`${global_path}/events/:id`, upload.single('eventImage'), apiHandler(async (req, res) => { + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + + const eventId = req.params.id; + const existingEvent = await drizzleService.getEvent(eventId); + + if (!existingEvent) { + return res.status(404).json({ error: 'Event not found' }); + } + + // Check ownership + if (req.session.user.role === 'library_admin' && existingEvent.libraryId !== req.session.user.libraryId) { + return res.status(403).json({ error: 'Unauthorized - you can only edit events for your 
library' }); + } + + // Handle event image upload + let imageUrl = req.body.imageUrl || existingEvent.imageUrl; + if (req.file) { + try { + imageUrl = await uploadImageToCloudinary(req.file, 'events'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload event image' }); + } + } + + const updateData = { + ...req.body, + imageUrl, + updatedAt: new Date() + }; + + const updatedEvent = await drizzleService.updateEvent(eventId, updateData); + return res.status(200).json(updatedEvent); + })); + + // Events endpoints + app.get(`${global_path}/events`, async (req, res) => { + try { + const libraryId = req.session.user?.libraryId; + const options = libraryId ? { libraryId } : {}; + + const events = await drizzleService.getEvents(options); + return res.status(200).json(events); + } catch (error) { + console.error("Error fetching events:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.delete(`${global_path}/events/:id`, async (req, res) => { + try { + const eventId = req.params.id; + const deleted = await drizzleService.deleteEvent(eventId); + + if (!deleted) { + return res.status(404).json({ error: 'Event not found' }); + } + + return res.status(200).json({ success: true }); + } catch (error) { + console.error("Error deleting event:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); +} diff --git a/src/routes/index.ts b/src/routes/index.ts new file mode 100644 index 0000000..8335b11 --- /dev/null +++ b/src/routes/index.ts @@ -0,0 +1,25 @@ +import type { Express } from "express"; +import { registerAuthRoutes } from "./auth.routes"; +import { registerStoriesRoutes } from "./stories.routes"; +import { registerLibrariesRoutes } from "./libraries.routes"; +import { registerMediaRoutes } from "./media.routes"; +import { registerEventsRoutes } from "./events.routes"; +import { registerAdminRoutes } from "./admin.routes"; +import { registerSuperAdminRoutes } from 
"./superadmin.routes"; +import { registerContactRoutes } from "./contact.routes"; +import { registerMaintenanceRoutes } from "./maintenance.routes"; +import { registerSettingsRoutes } from "./settings.routes"; + +export function registerAllRoutes(app: Express, global_path: string) { + // Register all route modules + registerAuthRoutes(app, global_path); + registerStoriesRoutes(app, global_path); + registerLibrariesRoutes(app, global_path); + registerMediaRoutes(app, global_path); + registerEventsRoutes(app, global_path); + registerAdminRoutes(app, global_path); + registerSuperAdminRoutes(app, global_path); + registerContactRoutes(app, global_path); + registerMaintenanceRoutes(app, global_path); + registerSettingsRoutes(app, global_path); +} diff --git a/src/routes/libraries.routes.ts b/src/routes/libraries.routes.ts new file mode 100644 index 0000000..89ecbc8 --- /dev/null +++ b/src/routes/libraries.routes.ts @@ -0,0 +1,130 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; +import { NotFoundError, AuthorizationError } from "../utils/errors"; +import { requireSuperAdmin, requireLibraryAdmin } from "../../middlewares/auth"; +import { upload, apiHandler, uploadImageToCloudinary } from "./shared"; + +export function registerLibrariesRoutes(app: Express, global_path: string) { + app.post(`${global_path}/libraries`, requireSuperAdmin, upload.fields([ + { name: 'logo', maxCount: 1 }, + { name: 'featuredImage', maxCount: 1 } + ]), apiHandler(async (req, res) => { + + const files = req.files as { [fieldname: string]: Express.Multer.File[] }; + + // Handle logo upload + let logoUrl = req.body.logoUrl || null; + if (files && files['logo'] && files['logo'][0]) { + try { + logoUrl = await uploadImageToCloudinary(files['logo'][0], 'libraries/logos'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload logo' }); + } + } + + // Handle featured image upload + let featuredImageUrl = 
req.body.featuredImageUrl || null; + if (files && files['featuredImage'] && files['featuredImage'][0]) { + try { + featuredImageUrl = await uploadImageToCloudinary(files['featuredImage'][0], 'libraries/featured'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload featured image' }); + } + } + + const libraryData = { + ...req.body, + logoUrl, + featuredImageUrl, + isApproved: false, // New libraries need approval + createdAt: new Date() + }; + + const library = await drizzleService.createLibrary(libraryData); + return res.status(201).json({ + success: true, + data: library + }); + })); + + // Update library with image upload + app.patch(`${global_path}/libraries/:id`, requireLibraryAdmin, upload.fields([ + { name: 'logo', maxCount: 1 }, + { name: 'featuredImage', maxCount: 1 } + ]), apiHandler(async (req, res) => { + const libraryId = req.params.id; + const existingLibrary = await drizzleService.getLibrary(libraryId); + + if (!existingLibrary) { + throw new NotFoundError('Library'); + } + + // Check if library admin is updating their own library + if (req.session.user!.role === 'library_admin' && req.session.user!.libraryId !== libraryId) { + throw new AuthorizationError('You can only edit your own library'); + } + + const files = req.files as { [fieldname: string]: Express.Multer.File[] }; + + // Handle logo upload + let logoUrl = req.body.logoUrl || existingLibrary.logoUrl; + if (files && files['logo'] && files['logo'][0]) { + try { + logoUrl = await uploadImageToCloudinary(files['logo'][0], 'libraries/logos'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload logo' }); + } + } + + // Handle featured image upload + let featuredImageUrl = req.body.featuredImageUrl || existingLibrary.featuredImageUrl; + if (files && files['featuredImage'] && files['featuredImage'][0]) { + try { + featuredImageUrl = await uploadImageToCloudinary(files['featuredImage'][0], 'libraries/featured'); + } catch (error) { + return 
res.status(500).json({ error: 'Failed to upload featured image' }); + } + } + + const updateData = { + ...req.body, + logoUrl, + featuredImageUrl, + updatedAt: new Date() + }; + + const updatedLibrary = await drizzleService.updateLibrary(libraryId, updateData); + return res.status(200).json({ + success: true, + data: updatedLibrary + }); + })); + + // Librarys endpoints + app.get(`${global_path}/libraries`, async (req, res) => { + try { + const libraries = await drizzleService.getLibraries(); + return res.status(200).json(libraries); + } catch (error) { + console.error("Error fetching libraries:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Get individual library + app.get(`${global_path}/libraries/:id`, async (req, res) => { + try { + const libraryId = req.params.id; + const library = await drizzleService.getLibrary(libraryId); + + if (!library) { + return res.status(404).json({ error: 'Library not found' }); + } + + return res.status(200).json(library); + } catch (error) { + console.error(`Error fetching library with ID ${req.params.id}:`, error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); +} diff --git a/src/routes/maintenance.routes.ts b/src/routes/maintenance.routes.ts new file mode 100644 index 0000000..9c7ae0d --- /dev/null +++ b/src/routes/maintenance.routes.ts @@ -0,0 +1,208 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; + +// Maintenance state (in production, this should be in a database or Redis) +let maintenanceMode = false; +const maintenanceWindows: any[] = []; +const backupHistory: any[] = [ + { id: 1, type: 'full', size: '2.3 GB', created: new Date('2025-06-18T02:00:00Z'), status: 'completed' }, + { id: 2, type: 'database', size: '890 MB', created: new Date('2025-06-17T02:00:00Z'), status: 'completed' }, + { id: 3, type: 'files', size: '1.4 GB', created: new Date('2025-06-16T02:00:00Z'), status: 'completed' }, 
+ { id: 4, type: 'database', size: '885 MB', created: new Date('2025-06-15T02:00:00Z'), status: 'completed' }, +]; + +export function registerMaintenanceRoutes(app: Express, global_path: string) { + // health check endpoint + app.get(`${global_path}/health`, async (req, res) => { + try { + const isHealthy = await drizzleService.healthCheck(); + res.json({ + status: isHealthy ? 'system healthy' : 'system unhealthy', + timestamp: new Date().toISOString() + }); + } catch (error) { + res.status(500).json({ + status: 'system unhealthy', + error: 'Health check failed', + timestamp: new Date().toISOString() + }); + } + }); + + // Get maintenance status + app.get(`${global_path}/maintenance/status`, async (req, res) => { + try { + const systemHealth = [ + { service: 'Web Server', status: 'healthy', uptime: '15 days, 3 hours', responseTime: 145, lastCheck: new Date() }, + { service: 'Database', status: 'healthy', uptime: '15 days, 3 hours', responseTime: 23, lastCheck: new Date() }, + { service: 'File Storage', status: 'warning', uptime: '2 days, 1 hour', responseTime: 287, lastCheck: new Date() }, + { service: 'Email Service', status: 'healthy', uptime: '15 days, 3 hours', responseTime: 412, lastCheck: new Date() }, + { service: 'CDN', status: 'healthy', uptime: '30 days, 12 hours', responseTime: 89, lastCheck: new Date() }, + ]; + + const systemMetrics = { + cpuUsage: Math.floor(Math.random() * 30) + 15, + memoryUsage: Math.floor(Math.random() * 40) + 50, + diskUsage: Math.floor(Math.random() * 30) + 30, + networkTraffic: '1.2 GB/day' + }; + + return res.status(200).json({ + maintenanceMode, + systemHealth, + systemMetrics, + maintenanceWindows, + backupHistory + }); + } catch (error) { + console.error("Error fetching maintenance status:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Toggle maintenance mode + app.post(`${global_path}/maintenance/toggle`, async (req, res) => { + try { + const { enabled } = req.body; + 
maintenanceMode = enabled; + + return res.status(200).json({ + success: true, + maintenanceMode, + message: `Maintenance mode ${enabled ? 'enabled' : 'disabled'}` + }); + } catch (error) { + console.error("Error toggling maintenance mode:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Schedule maintenance window + app.post(`${global_path}/maintenance/schedule`, async (req, res) => { + try { + const { title, description, scheduledStart, scheduledEnd, affectedServices } = req.body; + + if (!title || !scheduledStart) { + return res.status(400).json({ error: 'Title and start time are required' }); + } + + const newWindow = { + id: Date.now(), + title, + description, + scheduledStart: new Date(scheduledStart), + scheduledEnd: scheduledEnd ? new Date(scheduledEnd) : null, + affectedServices: affectedServices || [], + status: 'scheduled', + createdAt: new Date() + }; + + maintenanceWindows.push(newWindow); + + return res.status(201).json(newWindow); + } catch (error) { + console.error("Error scheduling maintenance:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Create backup + app.post(`${global_path}/maintenance/backup`, async (req, res) => { + try { + const { type } = req.body; + + if (!['database', 'files', 'full'].includes(type)) { + return res.status(400).json({ error: 'Invalid backup type' }); + } + + // Simulate backup creation + const sizes = { + database: `${Math.floor(Math.random() * 500) + 800} MB`, + files: `${Math.floor(Math.random() * 800) + 1200} MB`, + full: `${Math.floor(Math.random() * 1000) + 2000} MB` + }; + + const newBackup = { + id: Date.now(), + type, + size: sizes[type as keyof typeof sizes], + created: new Date(), + status: 'running' + }; + + backupHistory.unshift(newBackup); + + // Simulate backup completion after 3 seconds + setTimeout(() => { + const backup = backupHistory.find(b => b.id === newBackup.id); + if (backup) { + backup.status = 
'completed'; + } + }, 3000); + + return res.status(201).json(newBackup); + } catch (error) { + console.error("Error creating backup:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Get backup history + app.get(`${global_path}/maintenance/backups`, async (req, res) => { + try { + return res.status(200).json(backupHistory); + } catch (error) { + console.error("Error fetching backups:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Refresh system status + app.post(`${global_path}/maintenance/refresh`, async (req, res) => { + try { + // Simulate system check with random variations + const systemHealth = [ + { + service: 'Web Server', + status: 'healthy', + uptime: '15 days, 3 hours', + responseTime: Math.floor(Math.random() * 50) + 120, + lastCheck: new Date() + }, + { + service: 'Database', + status: 'healthy', + uptime: '15 days, 3 hours', + responseTime: Math.floor(Math.random() * 20) + 15, + lastCheck: new Date() + }, + { + service: 'File Storage', + status: Math.random() > 0.8 ? 
'warning' : 'healthy', + uptime: '2 days, 1 hour', + responseTime: Math.floor(Math.random() * 100) + 200, + lastCheck: new Date() + }, + { + service: 'Email Service', + status: 'healthy', + uptime: '15 days, 3 hours', + responseTime: Math.floor(Math.random() * 200) + 350, + lastCheck: new Date() + }, + { + service: 'CDN', + status: 'healthy', + uptime: '30 days, 12 hours', + responseTime: Math.floor(Math.random() * 30) + 70, + lastCheck: new Date() + }, + ]; + + return res.status(200).json({ systemHealth }); + } catch (error) { + console.error("Error refreshing system status:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); +} diff --git a/src/routes/media.routes.ts b/src/routes/media.routes.ts new file mode 100644 index 0000000..2f44ded --- /dev/null +++ b/src/routes/media.routes.ts @@ -0,0 +1,148 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; +import { upload, apiHandler, uploadImageToCloudinary } from "./shared"; + +export function registerMediaRoutes(app: Express, global_path: string) { + // Media endpoints + app.get(`${global_path}/media-items`, async (req, res) => { + try { + // Extract query parameters + const libraryId = req.query.libraryId ? String(req.query.libraryId) : undefined; + const galleryId = req.query.galleryId ? String(req.query.galleryId) : undefined; + + // Handle boolean parameters properly - undefined if not provided, explicit boolean if provided + let approved = undefined; + if (req.query.approved !== undefined) { + approved = req.query.approved === 'true'; + } + + const mediaType = req.query.mediaType ? String(req.query.mediaType) : undefined; + const tags = req.query.tag ? Array.isArray(req.query.tag) ? req.query.tag as string[] : [req.query.tag as string] : undefined; + const limit = req.query.limit ? Number(req.query.limit) : undefined; + const offset = req.query.offset ? 
Number(req.query.offset) : undefined; + + // Pass parameters to storage method with appropriate naming + const media = await drizzleService.getMediaItems({ + libraryId, + galleryId, + mediaType, + tags, + limit, + offset, + approved // Fixed to use the correct parameter name for the storage interface + }); + + return res.status(200).json(media); + } catch (error) { + console.error("Error fetching media:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Get individual media item + app.get(`${global_path}/media-items/:id`, async (req, res) => { + try { + const mediaId = req.params.id; + const mediaItem = await drizzleService.getMediaItem(mediaId); + + if (!mediaItem) { + return res.status(404).json({ error: 'Media item not found' }); + } + + return res.status(200).json(mediaItem); + } catch (error) { + console.error(`Error fetching media item with ID ${req.params.id}:`, error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.post(`${global_path}/media-items`, upload.single('mediaFile'), apiHandler(async (req, res) => { + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + + const libraryId = req.session.user.libraryId; + if (!libraryId) { + return res.status(400).json({ error: 'Library ID required' }); + } + + // Handle media file upload + let url = req.body.url || null; + if (req.file) { + try { + url = await uploadImageToCloudinary(req.file, 'media'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload media file' }); + } + } + + if (!url) { + return res.status(400).json({ error: 'Media URL or file is required' }); + } + + const mediaData = { + ...req.body, + libraryId, + url, + isApproved: false, // New media needs approval + createdAt: new Date() + }; + + const mediaItem = await drizzleService.createMediaItem(mediaData); + return res.status(201).json(mediaItem); + })); + + // Update media item with image 
upload + app.patch(`${global_path}/media-items/:id`, upload.single('mediaFile'), apiHandler(async (req, res) => { + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + + const mediaId = req.params.id; + const existingMedia = await drizzleService.getMediaItem(mediaId); + + if (!existingMedia) { + return res.status(404).json({ error: 'Media item not found' }); + } + + // Check ownership + if (req.session.user.role === 'library_admin' && existingMedia.libraryId !== req.session.user.libraryId) { + return res.status(403).json({ error: 'Unauthorized - you can only edit media for your library' }); + } + + // Handle media file upload + let url = req.body.url || existingMedia.url; + if (req.file) { + try { + url = await uploadImageToCloudinary(req.file, 'media'); + } catch (error) { + return res.status(500).json({ error: 'Failed to upload media file' }); + } + } + + const updateData = { + ...req.body, + url, + updatedAt: new Date() + }; + + const updatedMedia = await drizzleService.updateMediaItem(mediaId, updateData); + return res.status(200).json(updatedMedia); + })); + + // Admin: Get all unique media tags + app.get(`${global_path}/admin/media/tags`, apiHandler(async (req, res) => { + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + const mediaItems = await drizzleService.getMediaItems(); + const allTags = new Set(); + mediaItems.forEach(item => { + if (item.tags && Array.isArray(item.tags)) { + item.tags.forEach(tag => allTags.add(tag)); + } + }); + const sortedTags = Array.from(allTags).sort(); + return res.status(200).json(sortedTags); + })); +} diff --git a/src/routes/settings.routes.ts b/src/routes/settings.routes.ts new file mode 100644 index 0000000..0e3afa2 --- /dev/null +++ b/src/routes/settings.routes.ts @@ -0,0 +1,95 @@ +import type { Express } from "express"; + +// Settings state (in production, this should be in a database) +let platformSettings = { + 
general: { + siteName: "Library Digital Platform", + siteDescription: "A comprehensive platform for library digital experiences", + contactEmail: "contact@library-platform.com", + supportEmail: "support@library-platform.com", + defaultLanguage: "en", + timezone: "UTC", + allowRegistration: true, + requireEmailVerification: true, + maintenanceMode: false, + }, + security: { + passwordMinLength: 8, + requireStrongPasswords: true, + sessionTimeout: 24, + maxLoginAttempts: 5, + enableTwoFactor: false, + allowPasswordReset: true, + }, + email: { + smtpHost: "", + smtpPort: 587, + smtpUser: "", + smtpPassword: "", + fromEmail: "noreply@library-platform.com", + fromName: "Library Platform", + enableEmailNotifications: true, + }, + content: { + maxFileSize: 10, + allowedFileTypes: ["jpg", "jpeg", "png", "gif", "pdf", "mp4", "mp3"], + autoModeration: true, + requireApproval: true, + enableComments: true, + enableRatings: true, + }, + appearance: { + primaryColor: "#2563eb", + secondaryColor: "#64748b", + logo: "", + favicon: "", + customCSS: "", + darkModeEnabled: true, + }, + notifications: { + newUserSignup: true, + newLibraryApplication: true, + contentFlagged: true, + systemAlerts: true, + weeklyReports: true, + emailDigest: false, + } +}; + +export function registerSettingsRoutes(app: Express, global_path: string) { + // Get platform settings + app.get(`${global_path}/settings`, async (req, res) => { + try { + return res.status(200).json(platformSettings); + } catch (error) { + console.error("Error fetching settings:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Update platform settings + app.post(`${global_path}/settings`, async (req, res) => { + try { + const updates = req.body; + + // Merge updates with existing settings + platformSettings = { ...platformSettings, ...updates }; + + return res.status(200).json(platformSettings); + } catch (error) { + console.error("Error updating settings:", error); + return 
res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Test email configuration + app.post(`${global_path}/settings/test-email`, async (req, res) => { + try { + // Simulate email test + return res.status(200).json({ message: 'Test email sent successfully' }); + } catch (error) { + console.error("Error testing email:", error); + return res.status(500).json({ error: 'Failed to send test email' }); + } + }); +} diff --git a/src/routes/shared.ts b/src/routes/shared.ts new file mode 100644 index 0000000..47e51f7 --- /dev/null +++ b/src/routes/shared.ts @@ -0,0 +1,80 @@ +import type { Request, Response, NextFunction } from "express"; +import multer from 'multer'; +import { cloudinaryService } from "../../config/bucket-storage/cloudinary"; + +// Configure multer for memory storage +export const upload = multer({ + storage: multer.memoryStorage(), + limits: { + fileSize: 10 * 1024 * 1024, // 10MB limit + }, + fileFilter: (req, file, cb) => { + // Allow only image files + if (file.mimetype.startsWith('image/')) { + cb(null, true); + } else { + cb(new Error('Only image files are allowed')); + } + } +}); + +// Helper function to upload image to Cloudinary +export async function uploadImageToCloudinary(file: Express.Multer.File, folder: string): Promise { + if (!cloudinaryService.isReady()) { + throw new Error('Cloudinary not configured'); + } + + try { + const base64Image = `data:${file.mimetype};base64,${file.buffer.toString('base64')}`; + const result = await cloudinaryService.uploadImage(base64Image, { + folder: `library-platform/${folder}`, + }); + return result.url; + } catch (error) { + console.error("Cloudinary upload error:", error); + throw error; + } +} + +// API wrapper to ensure JSON responses +export function apiHandler(handler: (req: Request, res: Response) => Promise) { + return async (req: Request, res: Response, next: NextFunction) => { + // Always set JSON content type + res.setHeader('Content-Type', 'application/json'); + + try { + await 
handler(req, res); + } catch (error) { + console.error("API Error:", error); + res.status(500).json({ + error: 'Internal server error', + message: error instanceof Error ? error.message : String(error) + }); + } + }; +} + +// Create a middleware to ensure all API responses are JSON +export const jsonApiMiddleware = (req: Request, res: Response, next: NextFunction) => { + // Set the content type before any response is sent + res.setHeader('Content-Type', 'application/json'); + + // Store the original res.send method + const originalSend = res.send; + + // Override the send method to always ensure proper JSON responses + res.send = function (body: any) { + try { + // If body is already a string but not JSON formatted, convert it to a JSON response + if (typeof body === 'string' && (!body.startsWith('{') && !body.startsWith('['))) { + return originalSend.call(this, JSON.stringify({ message: body })); + } + return originalSend.call(this, body); + } catch (error) { + console.error("Error in JSON middleware:", error); + return originalSend.call(this, JSON.stringify({ error: "Internal server error" })); + } + }; + + next(); +}; diff --git a/src/routes/stories.routes.ts b/src/routes/stories.routes.ts new file mode 100644 index 0000000..0493069 --- /dev/null +++ b/src/routes/stories.routes.ts @@ -0,0 +1,241 @@ +import type { Express, Request, Response, NextFunction } from "express"; +import drizzleService from "../../services/drizzle-services"; +import { Story } from "../../config/database/schema"; +import { NotFoundError } from "../utils/errors"; +import { requireAuth } from "../../middlewares/auth"; +import { upload, apiHandler, uploadImageToCloudinary } from "./shared"; + +export function registerStoriesRoutes(app: Express, global_path: string) { + // Admin story management endpoints + app.post(`${global_path}/admin/stories`, requireAuth, upload.single('featuredImage'), apiHandler(async (req, res) => { + const libraryId = req.session.user!.libraryId; + + // Handle 
featured image upload + let featuredImageUrl = req.body.featuredImageUrl || null; + if (req.file) { + featuredImageUrl = await uploadImageToCloudinary(req.file, 'stories'); + } + + // Create story with library ID from session user + const storyData = { + ...req.body, + libraryId, + featuredImageUrl, + isApproved: false, // New stories need approval + isPublished: req.body.isPublished || false, + isFeatured: false, // Only super admin can feature stories + createdAt: new Date() + }; + + const story = await drizzleService.createStory(storyData); + return res.status(201).json({ + success: true, + data: story + }); + })); + + // Admin story update endpoint + app.patch(`${global_path}/admin/stories/:id`, requireAuth, upload.single('featuredImage'), apiHandler(async (req, res) => { + const storyId = req.params.id; + const existingStory = await drizzleService.getStory(storyId); + + if (!existingStory) { + throw new NotFoundError('Story'); + } + + const libraryId = req.session.user!.libraryId; + + // Check ownership for library admins + if (req.session.user!.role === 'library_admin' && existingStory.libraryId !== libraryId) { + throw new Error('You can only edit stories for your library'); + } + + // Handle featured image upload + let featuredImageUrl = req.body.featuredImageUrl || existingStory.featuredImageUrl; + if (req.file) { + featuredImageUrl = await uploadImageToCloudinary(req.file, 'stories'); + } + + // Preserve approval status - only super admin can change this + const updateData = { + ...req.body, + featuredImageUrl, + isApproved: existingStory.isApproved, // Preserve approval status + updatedAt: new Date() + }; + + const updatedStory = await drizzleService.updateStory(storyId, updateData); + return res.status(200).json({ + success: true, + data: updatedStory + }); + })); + + // Admin get single story endpoint + app.get(`${global_path}/admin/stories/:id`, requireAuth, apiHandler(async (req, res) => { + const storyId = req.params.id; + const story = await 
drizzleService.getStory(storyId); + + if (!story) { + throw new NotFoundError('Story'); + } + + return res.status(200).json({ + success: true, + data: story + }); + })); + + // Admin timelines endpoints + app.get(`${global_path}/admin/stories/:id/timelines`, apiHandler(async (req, res) => { + // Relaxed authentication for testing + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + + const storyId = req.params.id; + + // Get the story first to verify ownership + const story = await drizzleService.getStory(storyId); + + if (!story) { + return res.status(404).json({ error: 'Story not found' }); + } + + // Skip ownership check for testing + // Get the timelines + const timelines = await drizzleService.getTimelinesByStoryId(storyId); + console.log("Retrieved timelines:", timelines); + return res.status(200).json(timelines); + })); + + app.post(`${global_path}/admin/stories/:id/timelines`, apiHandler(async (req, res) => { + // Relaxed authentication for testing + if (!req.session.user) { + return res.status(403).json({ error: 'Unauthorized - not logged in' }); + } + + const storyId = req.params.id; + + // Get the story first to verify it exists + const story = await drizzleService.getStory(storyId); + + if (!story) { + return res.status(404).json({ error: 'Story not found' }); + } + + // Create timeline data + const timelineData = { + ...req.body, + storyId, + createdAt: new Date(), + updatedAt: new Date() + }; + + console.log("Creating timeline with data:", timelineData); + const timeline = await drizzleService.createTimeline(timelineData); + console.log("Timeline created successfully:", timeline); + return res.status(200).json(timeline); + })); + + // Stories endpoints + app.get(`${global_path}/stories`, async (req, res) => { + try { + // Extract query parameters + const libraryId = req.query.libraryId ? 
String(req.query.libraryId) : undefined; + + // Handle boolean parameters properly - undefined if not provided, explicit boolean if provided + let published = undefined; + if (req.query.published !== undefined) { + published = req.query.published === 'true'; + } + + let approved = undefined; + if (req.query.approved !== undefined) { + approved = req.query.approved === 'true'; + } + + let featured = undefined; + if (req.query.featured !== undefined) { + featured = req.query.featured === 'true'; + } + + const tags = req.query.tag ? Array.isArray(req.query.tag) ? req.query.tag as string[] : [req.query.tag as string] : undefined; + const limit = req.query.limit ? Number(req.query.limit) : undefined; + const offset = req.query.offset ? Number(req.query.offset) : undefined; + + // Pass parameters to storage method with appropriate naming + const stories = await drizzleService.getStories({ + libraryId, + published, + approved, // Fixed to use the correct parameter name for the storage interface + featured, + tags, + limit, + offset + }); + + return res.status(200).json(stories); + } catch (error) { + console.error("Error fetching stories:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Get individual story + app.get(`${global_path}/stories/:id`, async (req, res, next) => { + try { + // Skip the tags endpoint - special case + if (req.params.id === 'tags') { + const allStories = await drizzleService.getStories(); + const uniqueTags = new Set(); + allStories.forEach((story: Story) => { + if (story.tags && Array.isArray(story.tags)) { + story.tags.forEach((tag: string) => { + if (tag) uniqueTags.add(tag); + }); + } + }); + + return res.status(200).json({ + success: true, + data: Array.from(uniqueTags) + }); + } + + const storyId = req.params.id; + const story = await drizzleService.getStory(storyId); + + if (!story) { + throw new NotFoundError('Story'); + } + + return res.status(200).json({ + success: true, + data: story + }); + } 
catch (error) { + next(error); + } + }); + + // Get all story tags + app.get(`${global_path}/stories/tags`, async (req, res) => { + try { + const stories = await drizzleService.getStories({ published: true, approved: true }); + const allTags = new Set(); + + stories.forEach(story => { + if (story.tags && Array.isArray(story.tags)) { + story.tags.forEach(tag => allTags.add(tag)); + } + }); + + const sortedTags = Array.from(allTags).sort(); + return res.status(200).json(sortedTags); + } catch (error) { + console.error("Error fetching story tags:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); +} diff --git a/src/routes/superadmin.routes.ts b/src/routes/superadmin.routes.ts new file mode 100644 index 0000000..e0d8a94 --- /dev/null +++ b/src/routes/superadmin.routes.ts @@ -0,0 +1,244 @@ +import type { Express } from "express"; +import drizzleService from "../../services/drizzle-services"; +import bcrypt from "bcrypt"; + +export function registerSuperAdminRoutes(app: Express, global_path: string) { + // Super Admin stats endpoint + app.get(`${global_path}/sadmin/stats`, async (req, res) => { + try { + // Get counts of various entities for the dashboard + const libraries = await drizzleService.getLibraries(); + const stories = await drizzleService.getStories(); + const mediaItems = await drizzleService.getMediaItems(); + const usersPromises = libraries.map(library => drizzleService.getUsersByLibraryId(library.id)); + const usersArrays = await Promise.all(usersPromises); + const users = usersArrays.flat(); + + // Sample placeholder data - in a real app this would come from actual data + const stats = { + totalLibraries: libraries.length, + pendingLibraries: libraries.filter(m => !m.isApproved).length, + totalStories: stories.length, + pendingStories: stories.filter(s => !s.isApproved).length, + totalMedia: mediaItems.length, + uniqueGalleries: Array.from(new Set(mediaItems.map(m => m.galleryId))).length, + totalUsers: users.length, + 
activeUsers: users.filter(u => u.lastLoginAt !== null).length, + recentActivity: [ + { type: 'user_signup', user: 'National Gallery Admin', timestamp: new Date(Date.now() - 1000 * 60 * 5) }, + { type: 'story_published', user: 'MoMA Admin', title: 'Summer Exhibition Preview', timestamp: new Date(Date.now() - 1000 * 60 * 60) }, + { type: 'media_uploaded', user: 'Louvre Admin', count: 15, timestamp: new Date(Date.now() - 1000 * 60 * 60 * 3) }, + { type: 'library_approved', user: 'Super Admin', library: 'Contemporary Arts Center', timestamp: new Date(Date.now() - 1000 * 60 * 60 * 24) } + ] + }; + + return res.status(200).json(stats); + } catch (error) { + console.error("Error fetching super admin stats:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Super Admin moderation endpoints + app.get(`${global_path}/superadmin/moderation/stories`, async (req, res) => { + try { + // Get stories that need approval + const pendingStories = await drizzleService.getStories({ approved: false }); + return res.status(200).json(pendingStories); + } catch (error) { + console.error("Error fetching pending stories:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.get(`${global_path}/superadmin/moderation/media`, async (req, res) => { + try { + // Get media items that need approval + const pendingMedia = await drizzleService.getMediaItems({ approved: false }); + return res.status(200).json(pendingMedia); + } catch (error) { + console.error("Error fetching pending media:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.patch(`${global_path}/superadmin/stories/:id/approve`, async (req, res) => { + try { + const storyId = req.params.id; + // Fix DB column mismatch by using appropriate naming + const updatedStory = await drizzleService.updateStory(storyId, { + isApproved: true // Keep using isApproved as this is for the DB field name + }); + + if 
(!updatedStory) { + return res.status(404).json({ error: 'Story not found' }); + } + + return res.status(200).json(updatedStory); + } catch (error) { + console.error("Error approving story:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.patch(`${global_path}/superadmin/stories/:id/reject`, async (req, res) => { + try { + const storyId = req.params.id; + const updatedStory = await drizzleService.updateStory(storyId, { isApproved: false }); + + if (!updatedStory) { + return res.status(404).json({ error: 'Story not found' }); + } + + return res.status(200).json(updatedStory); + } catch (error) { + console.error("Error rejecting story:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.patch(`${global_path}/superadmin/media-items/:id/approve`, async (req, res) => { + try { + const mediaId = req.params.id; + const updatedMedia = await drizzleService.updateMediaItem(mediaId, { isApproved: true }); + + if (!updatedMedia) { + return res.status(404).json({ error: 'Media item not found' }); + } + + return res.status(200).json(updatedMedia); + } catch (error) { + console.error("Error approving media:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.patch(`${global_path}/superadmin/media-items/:id/reject`, async (req, res) => { + try { + const mediaId = req.params.id; + const updatedMedia = await drizzleService.updateMediaItem(mediaId, { isApproved: false }); + + if (!updatedMedia) { + return res.status(404).json({ error: 'Media item not found' }); + } + + return res.status(200).json(updatedMedia); + } catch (error) { + console.error("Error rejecting media:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Super Admin libraries endpoint + app.get(`${global_path}/superadmin/libraries`, async (req, res) => { + try { + const libraries = await drizzleService.getLibraries(); + return 
res.status(200).json(libraries); + } catch (error) { + console.error("Error fetching libraries:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + // Super Admin users endpoint + app.get(`${global_path}/superadmin/users`, async (req, res) => { + try { + // Get all users across all libraries + const libraries = await drizzleService.getLibraries(); + const usersPromises = libraries.map(library => drizzleService.getUsersByLibraryId(library.id)); + const usersArrays = await Promise.all(usersPromises); + const users = usersArrays.flat(); + + return res.status(200).json(users); + } catch (error) { + console.error("Error fetching users:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.post(`${global_path}/superadmin/users`, async (req, res) => { + try { + const userData = req.body; + + // Validate required fields + if (!userData.username || !userData.password || !userData.email || !userData.fullName || !userData.role) { + return res.status(400).json({ error: 'Missing required fields' }); + } + + const hashedPassword = await bcrypt.hash(userData.password, 10); + + const newUser = await drizzleService.createUser({ + username: userData.username, + password: hashedPassword, + email: userData.email, + fullName: userData.fullName, + role: userData.role, + libraryId: userData.libraryId || null, + isActive: userData.isActive !== undefined ? userData.isActive : true + }); + + return res.status(201).json(newUser); + } catch (error: any) { + console.error("Error creating user:", error); + + // Handle duplicate username/email errors + if (error.code === '23505') { // PostgreSQL unique violation code + if (error.constraint === 'users_username_unique') { + return res.status(409).json({ + error: 'Username already in use. Please choose a different one.' + }); + } + if (error.constraint === 'users_email_unique') { + return res.status(409).json({ + error: 'Email already in use. 
Please use a different email.' + }); + } + } + + return res.status(500).json({ error: error?.message || 'Internal server error' }); + } + }); + + app.patch(`${global_path}/superadmin/users/:id`, async (req, res) => { + try { + const userId = req.params.id; + const updateData = req.body; + + const updatedUser = await drizzleService.updateUser(userId, updateData); + + if (!updatedUser) { + return res.status(404).json({ error: 'User not found' }); + } + + return res.status(200).json(updatedUser); + } catch (error) { + console.error("Error updating user:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); + + app.post(`${global_path}/superadmin/users/:id/reset-password`, async (req, res) => { + try { + const userId = req.params.id; + const { password } = req.body; + + if (!password) { + return res.status(400).json({ error: 'Password is required' }); + } + + // Hash new password + const hashedPassword = await bcrypt.hash(password, 10); + + const updatedUser = await drizzleService.updateUser(userId, { password: hashedPassword }); + + if (!updatedUser) { + return res.status(404).json({ error: 'User not found' }); + } + + return res.status(200).json({ message: 'Password reset successfully' }); + } catch (error) { + console.error("Error resetting password:", error); + return res.status(500).json({ error: 'Internal server error' }); + } + }); +} diff --git a/src/types/express.d.ts b/src/types/express.d.ts new file mode 100644 index 0000000..51c2da4 --- /dev/null +++ b/src/types/express.d.ts @@ -0,0 +1,16 @@ +import 'express-session'; + +declare module 'express-session' { + interface SessionData { + user?: { + id: string; + username: string; + fullName: string; + email: string; + role: string; + libraryId?: string; + }; + } +} + +export { }; diff --git a/src/utils/errors.ts b/src/utils/errors.ts new file mode 100644 index 0000000..f69c19c --- /dev/null +++ b/src/utils/errors.ts @@ -0,0 +1,103 @@ +/** + * Custom Error Classes + * + * These 
error classes provide a consistent way to handle errors throughout + * the application. They extend the base AppError class and are automatically + * handled by the error handler middleware. + * + * Usage: + * throw new NotFoundError('Story'); + * throw new ValidationError('Invalid input', { field: ['Error message'] }); + * throw new AuthenticationError('Invalid credentials'); + * + * Error Handler: + * All errors are caught by the error handler middleware in + * middlewares/errors/error-handler.ts which formats the response. + */ + +/** + * Base error class for all application errors + * @param statusCode - HTTP status code + * @param message - Error message + * @param isOperational - Whether this is an operational error (vs programming error) + * @param code - Error code for client-side handling + */ +export class AppError extends Error { + constructor( + public statusCode: number, + public message: string, + public isOperational = true, + public code?: string + ) { + super(message); + Object.setPrototypeOf(this, AppError.prototype); + Error.captureStackTrace(this, this.constructor); + } +} + +/** + * Validation error (400) - Used when request validation fails + * @param message - Error message + * @param errors - Field-specific validation errors + */ +export class ValidationError extends AppError { + constructor(message: string, public errors?: Record) { + super(400, message, true, 'VALIDATION_ERROR'); + this.name = 'ValidationError'; + } +} + +/** + * Authentication error (401) - Used when user is not authenticated + * @param message - Error message (default: 'Authentication required') + */ +export class AuthenticationError extends AppError { + constructor(message: string = 'Authentication required') { + super(401, message, true, 'AUTHENTICATION_ERROR'); + this.name = 'AuthenticationError'; + } +} + +/** + * Authorization error (403) - Used when user lacks required permissions + * @param message - Error message (default: 'Insufficient permissions') + */ +export 
class AuthorizationError extends AppError { + constructor(message: string = 'Insufficient permissions') { + super(403, message, true, 'AUTHORIZATION_ERROR'); + this.name = 'AuthorizationError'; + } +} + +/** + * Not found error (404) - Used when a resource doesn't exist + * @param resource - Resource name (e.g., 'Story', 'User') + */ +export class NotFoundError extends AppError { + constructor(resource: string = 'Resource') { + super(404, `${resource} not found`, true, 'NOT_FOUND'); + this.name = 'NotFoundError'; + } +} + +/** + * Conflict error (409) - Used when there's a resource conflict + * @param message - Error message describing the conflict + */ +export class ConflictError extends AppError { + constructor(message: string) { + super(409, message, true, 'CONFLICT'); + this.name = 'ConflictError'; + } +} + +/** + * Internal server error (500) - Used for unexpected server errors + * @param message - Error message (default: 'Internal server error') + */ +export class InternalServerError extends AppError { + constructor(message: string = 'Internal server error') { + super(500, message, false, 'INTERNAL_ERROR'); + this.name = 'InternalServerError'; + } +} diff --git a/src/validations/auth.schemas.ts b/src/validations/auth.schemas.ts new file mode 100644 index 0000000..0547679 --- /dev/null +++ b/src/validations/auth.schemas.ts @@ -0,0 +1,16 @@ +/** + * Authentication Validation Schemas + * + * Re-exports the login schema from the database schema to maintain + * a single source of truth. All authentication validation should use + * schemas from this file. 
+ * + * Usage: + * router.post('/auth/login', validate(loginSchema), handler); + */ +import { z } from 'zod'; +import { loginSchema } from '../../config/database/schema'; + +// Re-export login schema from database schema (single source of truth) +export { loginSchema }; +export type LoginInput = z.infer; diff --git a/src/validations/contact.schemas.ts b/src/validations/contact.schemas.ts new file mode 100644 index 0000000..c990fbf --- /dev/null +++ b/src/validations/contact.schemas.ts @@ -0,0 +1,34 @@ +/** + * Contact Message Validation Schemas + * + * These schemas extend the database insert schemas from config/database/schema.ts + * to add API-specific validation rules. This follows DRY principles by using + * the database schema as the single source of truth. + * + * Usage: + * - Use createContactMessageSchema for POST /api/v1/contact-messages + * - Use replyContactMessageSchema for POST /api/v1/contact-messages/:id/reply + * + * Example: + * router.post('/contact-messages', validate(createContactMessageSchema), handler); + */ +import { z } from 'zod'; +import { insertContactMessageSchema } from '../../config/database/schema'; + +// Extends database schema with API-specific validation constraints +export const createContactMessageSchema = insertContactMessageSchema.extend({ + name: z.string().min(1, 'Name is required').max(100), + email: z.string().email('Invalid email address'), + subject: z.string().min(1, 'Subject is required').max(200), + message: z.string().min(1, 'Message is required').max(5000), +}); + +// Schema for replying to contact messages (doesn't need full contact info) +export const replyContactMessageSchema = z.object({ + subject: z.string().min(1, 'Subject is required').max(200), + message: z.string().min(1, 'Message is required').max(5000), +}); + +// TypeScript types inferred from schemas for type-safe request handling +export type CreateContactMessageInput = z.infer; +export type ReplyContactMessageInput = z.infer; diff --git 
a/src/validations/event.schemas.ts b/src/validations/event.schemas.ts new file mode 100644 index 0000000..7a6e4a4 --- /dev/null +++ b/src/validations/event.schemas.ts @@ -0,0 +1,32 @@ +/** + * Event Validation Schemas + * + * Extends database insert schemas with API-specific validation rules. + * Event date can be provided as ISO string or Date object. + * + * Usage: + * - createEventSchema: POST /api/v1/events + * - updateEventSchema: PATCH /api/v1/events/:id + * + * Example: + * router.post('/events', requireAuth, upload.single('eventImage'), + * validate(createEventSchema), handler); + */ +import { z } from 'zod'; +import { insertEventSchema } from '../../config/database/schema'; + +// Extends database schema with API-specific validation +export const createEventSchema = insertEventSchema.extend({ + title: z.string().min(1, 'Title is required').max(200), + description: z.string().min(1).optional(), + eventDate: z.string().datetime().or(z.date()), // Accepts ISO string or Date + location: z.string().min(1).optional(), + imageUrl: z.string().url().optional().nullable(), +}); + +// Update schema allows partial updates +export const updateEventSchema = createEventSchema.partial(); + +// TypeScript types for type-safe request handling +export type CreateEventInput = z.infer; +export type UpdateEventInput = z.infer; diff --git a/src/validations/library.schemas.ts b/src/validations/library.schemas.ts new file mode 100644 index 0000000..549f4f9 --- /dev/null +++ b/src/validations/library.schemas.ts @@ -0,0 +1,33 @@ +/** + * Library Validation Schemas + * + * Extends database insert schemas with API-specific validation rules. + * Most fields are optional to allow flexible library creation/updates. 
+ * + * Usage: + * - createLibrarySchema: POST /api/v1/libraries (super_admin only) + * - updateLibrarySchema: PATCH /api/v1/libraries/:id + * + * Example: + * router.post('/libraries', requireSuperAdmin, validate(createLibrarySchema), handler); + */ +import { z } from 'zod'; +import { insertLibrarySchema } from '../../config/database/schema'; + +// Extends database schema with API-specific validation and makes fields optional +export const createLibrarySchema = insertLibrarySchema + .extend({ + name: z.string().min(1, 'Name is required').max(200), + description: z.string().min(1).optional(), + website: z.string().url().optional(), + logoUrl: z.string().url().optional().nullable(), + featuredImageUrl: z.string().url().optional().nullable(), + }) + .partial(); // Make most fields optional for API flexibility + +// Update schema allows partial updates +export const updateLibrarySchema = createLibrarySchema.partial(); + +// TypeScript types for type-safe request handling +export type CreateLibraryInput = z.infer; +export type UpdateLibraryInput = z.infer; diff --git a/src/validations/media.schemas.ts b/src/validations/media.schemas.ts new file mode 100644 index 0000000..1bdb009 --- /dev/null +++ b/src/validations/media.schemas.ts @@ -0,0 +1,43 @@ +/** + * Media Item Validation Schemas + * + * Extends database insert schemas with API-specific validation rules. + * URL is optional since media can be uploaded via file upload. 
+ * + * Usage: + * - createMediaItemSchema: POST /api/v1/media-items + * - updateMediaItemSchema: PATCH /api/v1/media-items/:id + * - mediaQuerySchema: GET /api/v1/media-items (query parameters) + * + * Example: + * router.post('/media-items', requireAuth, upload.single('mediaFile'), + * validate(createMediaItemSchema), handler); + */ +import { z } from 'zod'; +import { insertMediaItemSchema } from '../../config/database/schema'; + +// Extends database schema with API-specific validation +export const createMediaItemSchema = insertMediaItemSchema.extend({ + title: z.string().min(1, 'Title is required').max(200), + url: z.string().url().optional(), // URL optional if file upload provided + tags: z.array(z.string()).optional().default([]), +}); + +// Update schema allows partial updates +export const updateMediaItemSchema = createMediaItemSchema.partial(); + +// Query parameter schema for filtering media items +export const mediaQuerySchema = z.object({ + libraryId: z.string().optional(), + galleryId: z.string().optional(), + approved: z.string().transform((val) => val === 'true').optional(), + mediaType: z.enum(['image', 'video', 'audio']).optional(), + tag: z.union([z.string(), z.array(z.string())]).optional(), + limit: z.string().transform(Number).optional(), + offset: z.string().transform(Number).optional(), +}); + +// TypeScript types for type-safe request handling +export type CreateMediaItemInput = z.infer<typeof createMediaItemSchema>; +export type UpdateMediaItemInput = z.infer<typeof updateMediaItemSchema>; +export type MediaQueryInput = z.infer<typeof mediaQuerySchema>; diff --git a/src/validations/story.schemas.ts b/src/validations/story.schemas.ts new file mode 100644 index 0000000..9fa8dd1 --- /dev/null +++ b/src/validations/story.schemas.ts @@ -0,0 +1,51 @@ +/** + * Story Validation Schemas + * + * Extends database insert schemas with API-specific validation rules. + * The createStorySchema omits 'summary' (not required in API) and adds + * stricter validation for title, content, and featuredImageUrl.
+ * + * Usage: + * - createStorySchema: POST /api/v1/admin/stories + * - updateStorySchema: PATCH /api/v1/admin/stories/:id + * - storyQuerySchema: GET /api/v1/stories (query parameters) + * + * Example: + * router.post('/admin/stories', validate(createStorySchema), handler); + * router.get('/stories', validateQuery(storyQuerySchema), handler); + */ +import { z } from 'zod'; +import { insertStorySchema } from '../../config/database/schema'; + +// Base schema for creating stories - extends DB schema with API constraints +export const createStorySchema = insertStorySchema + .omit({ summary: true }) // summary not required in API + .extend({ + title: z.string().min(1, 'Title is required').max(200), + content: z.string().min(1, 'Content is required'), + featuredImageUrl: z.string().url().optional().nullable(), + tags: z.array(z.string()).optional().default([]), + }) + .refine((data) => !data.libraryId || z.string().uuid().safeParse(data.libraryId).success, { + message: 'Library ID must be a valid UUID', + path: ['libraryId'], + }); + +// Update schema allows partial updates (all fields optional); innerType() is needed because .partial() does not exist on refined (ZodEffects) schemas +export const updateStorySchema = createStorySchema.innerType().partial(); + +// Query parameter schema for filtering stories +export const storyQuerySchema = z.object({ + libraryId: z.string().optional(), + published: z.string().transform((val) => val === 'true').optional(), + approved: z.string().transform((val) => val === 'true').optional(), + featured: z.string().transform((val) => val === 'true').optional(), + tag: z.union([z.string(), z.array(z.string())]).optional(), + limit: z.string().transform(Number).optional(), + offset: z.string().transform(Number).optional(), +}); + +// TypeScript types for type-safe request handling +export type CreateStoryInput = z.infer<typeof createStorySchema>; +export type UpdateStoryInput = z.infer<typeof updateStorySchema>; +export type StoryQueryInput = z.infer<typeof storyQuerySchema>; diff --git a/src/validations/user.schemas.ts b/src/validations/user.schemas.ts new file mode 100644 index 0000000..4fcd0d3 --- /dev/null +++
b/src/validations/user.schemas.ts @@ -0,0 +1,37 @@ +/** + * User Validation Schemas + * + * Extends database insert schemas with API-specific validation rules. + * Password requirements are stricter than database constraints for security. + * + * Usage: + * - createUserSchema: POST /api/v1/superadmin/users + * - updateUserSchema: PATCH /api/v1/superadmin/users/:id + * - resetPasswordSchema: POST /api/v1/superadmin/users/:id/reset-password + * + * Example: + * router.post('/superadmin/users', requireSuperAdmin, validate(createUserSchema), handler); + */ +import { z } from 'zod'; +import { insertUserSchema } from '../../config/database/schema'; + +// Extends database schema with stronger password requirements and validation +export const createUserSchema = insertUserSchema.extend({ + password: z.string().min(8, 'Password must be at least 8 characters'), // Stronger than DB requirement + username: z.string().min(3, 'Username must be at least 3 characters').max(50), + email: z.string().email('Invalid email address'), + fullName: z.string().min(1, 'Full name is required').max(100), +}); + +// Update schema excludes password (use resetPasswordSchema for password changes) +export const updateUserSchema = createUserSchema.partial().omit({ password: true }); + +// Schema for password reset (separate from user update) +export const resetPasswordSchema = z.object({ + password: z.string().min(8, 'Password must be at least 8 characters'), +}); + +// TypeScript types for type-safe request handling +export type CreateUserInput = z.infer<typeof createUserSchema>; +export type UpdateUserInput = z.infer<typeof updateUserSchema>; +export type ResetPasswordInput = z.infer<typeof resetPasswordSchema>; diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..83deb48 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,141 @@ +# Testing Documentation + +This directory contains unit tests for the museumCall backend project.
+ +## Test Structure + +``` +tests/ +├── setup.ts # Jest setup file +├── utils/ +│ └── mocks.ts # Common mocks and utilities +├── unit/ +│ ├── controllers/ # Controller unit tests +│ ├── middlewares/ # Middleware unit tests +│ └── utils/ # Utility unit tests +└── README.md # This file +``` + +## Running Tests + +### Run all tests +```bash +npm test +# or +pnpm test +``` + +### Run tests in watch mode +```bash +npm run test:watch +# or +pnpm test:watch +``` + +### Run tests with coverage +```bash +npm test -- --coverage +# or +pnpm test -- --coverage +``` + +### Run a specific test file +```bash +npm test -- tests/unit/utils/errors.test.ts +``` + +## Test Coverage + +The project uses Jest for testing with the following coverage goals: +- **Statements**: > 80% +- **Branches**: > 80% +- **Functions**: > 80% +- **Lines**: > 80% + +Coverage reports are generated in the `coverage/` directory. + +## Writing Tests + +### Test File Naming +- Test files should be named `*.test.ts` or `*.spec.ts` +- Place test files next to the source files or in the `tests/` directory + +### Test Structure +```typescript +describe('ComponentName', () => { + beforeEach(() => { + // Setup code + }); + + describe('methodName', () => { + it('should do something', () => { + // Test implementation + }); + }); +}); +``` + +### Using Mocks +Common mocks are available in `tests/utils/mocks.ts`: +- `createMockRequest()` - Creates a mock Express Request +- `createMockResponse()` - Creates a mock Express Response +- `createMockNext()` - Creates a mock NextFunction +- `createMockSession()` - Creates a mock session with user data + +### Example Test +```typescript +import { createMockRequest, createMockResponse, createMockNext } from '../utils/mocks'; + +describe('MyMiddleware', () => { + it('should handle request correctly', () => { + const req = createMockRequest(); + const res = createMockResponse(); + const next = createMockNext(); + + // Test implementation + }); +}); +``` + +## Current Test Coverage 
+ +### Unit Tests +- ✅ Error classes (`src/utils/errors.ts`) +- ✅ Auth middleware (`src/middlewares/auth.ts`) +- ✅ Validation middleware (`src/middlewares/validation.ts`) +- ✅ Auth controller (`src/controllers/auth.controller.ts`) + +### Future Tests +- Integration tests for routes +- Service layer tests +- Database operation tests + +## Best Practices + +1. **Isolation**: Each test should be independent and not rely on other tests +2. **Mocking**: Mock external dependencies (database, APIs, etc.) +3. **Clear Names**: Use descriptive test names that explain what is being tested +4. **Arrange-Act-Assert**: Structure tests with clear setup, execution, and verification +5. **Coverage**: Aim for high coverage but focus on testing critical paths + +## Troubleshooting + +### Jest dependency errors +If you encounter errors like "Cannot find module '@jest/test-sequencer'", try: +```bash +pnpm install +# or +npm install +``` + +### Tests fail with module resolution errors +- Ensure `tsconfig.json` includes the test files +- Check that `jest.config.js` has correct `moduleNameMapper` settings + +### Mock not working +- Ensure mocks are imported before the module being tested +- Check that `jest.mock()` is called at the top level of the test file + +### Type errors in tests +- Ensure `@types/jest` is installed +- Check that `tsconfig.json` includes `"jest"` in the `types` array diff --git a/tests/setup.ts b/tests/setup.ts new file mode 100644 index 0000000..eb107ec --- /dev/null +++ b/tests/setup.ts @@ -0,0 +1,18 @@ +/** + * Jest Setup File + * + * This file runs before all tests and sets up the testing environment. 
+ */ + +// Mock environment variables if needed +process.env.NODE_ENV = 'test'; + +// Suppress console logs during tests (optional - uncomment if needed) +// global.console = { +// ...console, +// log: jest.fn(), +// debug: jest.fn(), +// info: jest.fn(), +// warn: jest.fn(), +// error: jest.fn(), +// }; diff --git a/tests/unit/controllers/auth.controller.test.ts b/tests/unit/controllers/auth.controller.test.ts new file mode 100644 index 0000000..0ee90fe --- /dev/null +++ b/tests/unit/controllers/auth.controller.test.ts @@ -0,0 +1,223 @@ +/** + * Unit Tests for Auth Controller + */ + +import { Request, Response } from 'express'; +import { compare } from 'bcrypt'; +import { AuthController } from '../../../src/controllers/auth.controller'; +import { AuthenticationError } from '../../../src/utils/errors'; +import drizzleService from '../../../services/drizzle-services'; +import { logger } from '../../../src/middlewares/logger'; +import { createMockRequest, createMockResponse, createMockSession } from '../../utils/mocks'; + +// Mock dependencies +jest.mock('bcrypt'); +jest.mock('../../../services/drizzle-services'); +jest.mock('../../../src/middlewares/logger', () => ({ + logger: { + info: jest.fn(), + error: jest.fn(), + warn: jest.fn(), + }, +})); + +const mockedCompare = compare as jest.MockedFunction<typeof compare>; +const mockedDrizzleService = drizzleService as jest.Mocked<typeof drizzleService>; + +describe('AuthController', () => { + let authController: AuthController; + let mockRequest: Partial<Request>; + let mockResponse: Partial<Response>; + + beforeEach(() => { + authController = new AuthController(); + mockRequest = createMockRequest(); + mockResponse = createMockResponse(); + jest.clearAllMocks(); + }); + + describe('login', () => { + const mockUser = { + id: 'user-123', + username: 'testuser', + password: 'hashedPassword', + fullName: 'Test User', + email: 'test@example.com', + role: 'library_admin', + libraryId: 'library-123', + createdAt: new Date(), + updatedAt: new Date(), + }; + + it('should successfully
login with valid credentials', async () => { + mockRequest.body = { + username: 'testuser', + password: 'password123', + }; + mockRequest.session = createMockSession() as any; + + mockedDrizzleService.getUserByUsername.mockResolvedValue(mockUser as any); + (mockedCompare as jest.Mock).mockResolvedValue(true); + + await authController.login(mockRequest as Request, mockResponse as Response); + + expect(mockedDrizzleService.getUserByUsername).toHaveBeenCalledWith('testuser'); + expect(mockedCompare).toHaveBeenCalledWith('password123', 'hashedPassword'); + expect((mockRequest.session as any)!.user).toEqual({ + id: 'user-123', + username: 'testuser', + fullName: 'Test User', + email: 'test@example.com', + role: 'library_admin', + libraryId: 'library-123', + }); + expect(mockResponse.status).toHaveBeenCalledWith(200); + expect(mockResponse.json).toHaveBeenCalledWith({ + success: true, + data: { + id: 'user-123', + username: 'testuser', + fullName: 'Test User', + email: 'test@example.com', + role: 'library_admin', + libraryId: 'library-123', + }, + }); + expect(logger.info).toHaveBeenCalledWith('User logged in', { + userId: 'user-123', + username: 'testuser', + }); + }); + + it('should throw AuthenticationError when user does not exist', async () => { + mockRequest.body = { + username: 'nonexistent', + password: 'password123', + }; + + mockedDrizzleService.getUserByUsername.mockResolvedValue(undefined); + + await expect( + authController.login(mockRequest as Request, mockResponse as Response) + ).rejects.toThrow(AuthenticationError); + + expect(mockedDrizzleService.getUserByUsername).toHaveBeenCalledWith('nonexistent'); + expect(mockedCompare).not.toHaveBeenCalled(); + expect(mockResponse.json).not.toHaveBeenCalled(); + }); + + it('should throw AuthenticationError when password does not match', async () => { + mockRequest.body = { + username: 'testuser', + password: 'wrongpassword', + }; + + mockedDrizzleService.getUserByUsername.mockResolvedValue(mockUser as any); + 
(mockedCompare as jest.Mock).mockResolvedValue(false); + + await expect( + authController.login(mockRequest as Request, mockResponse as Response) + ).rejects.toThrow(AuthenticationError); + + expect(mockedDrizzleService.getUserByUsername).toHaveBeenCalledWith('testuser'); + expect(mockedCompare).toHaveBeenCalledWith('wrongpassword', 'hashedPassword'); + expect((mockRequest.session as any)!.user).toBeUndefined(); + }); + + it('should handle errors from getUserByUsername', async () => { + mockRequest.body = { + username: 'testuser', + password: 'password123', + }; + + mockedDrizzleService.getUserByUsername.mockRejectedValue(new Error('Database error')); + + await expect( + authController.login(mockRequest as Request, mockResponse as Response) + ).rejects.toThrow('Database error'); + }); + }); + + describe('getSession', () => { + it('should return user session when user is logged in', async () => { + const mockSessionUser = { + id: 'user-123', + username: 'testuser', + fullName: 'Test User', + email: 'test@example.com', + role: 'library_admin', + libraryId: 'library-123', + }; + + mockRequest.session = createMockSession(mockSessionUser) as any; + + await authController.getSession(mockRequest as Request, mockResponse as Response); + + expect(mockResponse.status).toHaveBeenCalledWith(200); + expect(mockResponse.json).toHaveBeenCalledWith({ + success: true, + data: mockSessionUser, + }); + }); + + it('should return null when user is not logged in', async () => { + mockRequest.session = {} as any; + + await authController.getSession(mockRequest as Request, mockResponse as Response); + + expect(mockResponse.status).toHaveBeenCalledWith(200); + expect(mockResponse.json).toHaveBeenCalledWith({ + success: true, + data: null, + }); + }); + + it('should return null when session is undefined', async () => { + mockRequest.session = undefined; + + await authController.getSession(mockRequest as Request, mockResponse as Response); + + 
expect(mockResponse.status).toHaveBeenCalledWith(200); + expect(mockResponse.json).toHaveBeenCalledWith({ + success: true, + data: null, + }); + }); + }); + + describe('logout', () => { + it('should successfully logout and destroy session', async () => { + const destroyCallback = jest.fn((callback: (err?: Error) => void) => callback(undefined)); + mockRequest.session = { + ...createMockSession(), + destroy: destroyCallback, + } as any; + + await authController.logout(mockRequest as Request, mockResponse as Response); + + expect(destroyCallback).toHaveBeenCalled(); + expect(mockResponse.clearCookie).toHaveBeenCalledWith('connect.sid'); + expect(mockResponse.status).toHaveBeenCalledWith(200); + expect(mockResponse.json).toHaveBeenCalledWith({ + success: true, + message: 'Logged out successfully', + }); + }); + + it('should reject when session destroy fails', async () => { + const destroyError = new Error('Destroy failed'); + const destroyCallback = jest.fn((callback: (err?: Error) => void) => callback(destroyError)); + mockRequest.session = { + ...createMockSession(), + destroy: destroyCallback, + } as any; + + await expect( + authController.logout(mockRequest as Request, mockResponse as Response) + ).rejects.toThrow('Destroy failed'); + + expect(destroyCallback).toHaveBeenCalled(); + expect(mockResponse.clearCookie).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/tests/unit/middlewares/auth.test.ts b/tests/unit/middlewares/auth.test.ts new file mode 100644 index 0000000..a26beb9 --- /dev/null +++ b/tests/unit/middlewares/auth.test.ts @@ -0,0 +1,185 @@ +/** + * Unit Tests for Auth Middleware + */ + +import { Request, Response, NextFunction } from 'express'; +import { requireAuth, requireRole, requireSuperAdmin, requireLibraryAdmin } from '../../../src/middlewares/auth'; +import { AuthenticationError, AuthorizationError } from '../../../src/utils/errors'; +import { createMockRequest, createMockResponse, createMockNext, createMockSession } from 
'../../utils/mocks'; + +describe('Auth Middleware', () => { + let mockRequest: Partial<Request>; + let mockResponse: Partial<Response>; + let mockNext: NextFunction; + + beforeEach(() => { + mockRequest = createMockRequest(); + mockResponse = createMockResponse(); + mockNext = createMockNext(); + }); + + describe('requireAuth', () => { + it('should call next() when user is authenticated', () => { + mockRequest.session = createMockSession(); + + requireAuth(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + expect(mockNext).toHaveBeenCalledWith(); + }); + + it('should throw AuthenticationError when user is not authenticated', () => { + mockRequest.session = {} as any; + + expect(() => { + requireAuth(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(AuthenticationError); + + expect(mockNext).not.toHaveBeenCalled(); + }); + + it('should throw AuthenticationError when session is undefined', () => { + mockRequest.session = undefined; + + expect(() => { + requireAuth(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(AuthenticationError); + }); + }); + + describe('requireRole', () => { + it('should call next() when user has required role', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'library_admin', + }); + + const middleware = requireRole('library_admin'); + middleware(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + expect(mockNext).toHaveBeenCalledWith(); + }); + + it('should call next() when user has one of multiple required roles', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'super_admin', + }); + + const middleware = requireRole('library_admin', 'super_admin'); + middleware(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + }); + + it('should throw
AuthenticationError when user is not authenticated', () => { + mockRequest.session = {} as any; + + const middleware = requireRole('library_admin'); + + expect(() => { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(AuthenticationError); + }); + + it('should throw AuthorizationError when user does not have required role', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'user', + }); + + const middleware = requireRole('library_admin', 'super_admin'); + + expect(() => { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(AuthorizationError); + }); + + it('should include required roles in error message', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'user', + }); + + const middleware = requireRole('library_admin', 'super_admin'); + + try { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + fail('Should have thrown AuthorizationError'); + } catch (error) { + expect(error).toBeInstanceOf(AuthorizationError); + expect((error as AuthorizationError).message).toContain('library_admin'); + expect((error as AuthorizationError).message).toContain('super_admin'); + } + }); + }); + + describe('requireSuperAdmin', () => { + it('should call next() when user is super_admin', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'super_admin', + }); + + requireSuperAdmin(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + }); + + it('should throw AuthorizationError when user is not super_admin', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'library_admin', + }); + + expect(() => { + requireSuperAdmin(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(AuthorizationError); + }); + }); + + 
+ describe('requireLibraryAdmin', () => { + it('should call next() when user is library_admin', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'library_admin', + }); + + requireLibraryAdmin(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + }); + + it('should call next() when user is super_admin', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'super_admin', + }); + + requireLibraryAdmin(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + }); + + it('should throw AuthorizationError when user is not library_admin or super_admin', () => { + mockRequest.session = createMockSession({ + id: 'test-id', + username: 'testuser', + role: 'user', + }); + + expect(() => { + requireLibraryAdmin(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(AuthorizationError); + }); + }); +}); diff --git a/tests/unit/middlewares/validation.test.ts b/tests/unit/middlewares/validation.test.ts new file mode 100644 index 0000000..ffef496 --- /dev/null +++ b/tests/unit/middlewares/validation.test.ts @@ -0,0 +1,223 @@ +/** + * Unit Tests for Validation Middleware + */ + +import { Request, Response, NextFunction } from 'express'; +import { z } from 'zod'; +import { validate, validateQuery, validateParams } from '../../../src/middlewares/validation'; +import { ValidationError } from '../../../src/utils/errors'; +import { createMockRequest, createMockResponse, createMockNext } from '../../utils/mocks'; + +describe('Validation Middleware', () => { + let mockRequest: Partial<Request>; + let mockResponse: Partial<Response>; + let mockNext: NextFunction; + + beforeEach(() => { + mockRequest = createMockRequest(); + mockResponse = createMockResponse(); + mockNext = createMockNext(); + }); + + describe('validate', () => { + const loginSchema = z.object({ + username:
z.string().min(1, 'Username is required'), + password: z.string().min(6, 'Password must be at least 6 characters'), + }); + + it('should call next() when validation passes', () => { + mockRequest.body = { + username: 'testuser', + password: 'password123', + }; + + const middleware = validate(loginSchema); + middleware(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + expect(mockNext).toHaveBeenCalledWith(); + }); + + it('should throw ValidationError when validation fails', () => { + mockRequest.body = { + username: '', + password: '123', + }; + + const middleware = validate(loginSchema); + + expect(() => { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(ValidationError); + }); + + it('should format validation errors correctly', () => { + mockRequest.body = { + username: '', + password: '123', + }; + + const middleware = validate(loginSchema); + + try { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + fail('Should have thrown ValidationError'); + } catch (error) { + expect(error).toBeInstanceOf(ValidationError); + const validationError = error as ValidationError; + expect(validationError.errors).toBeDefined(); + expect(validationError.errors).toHaveProperty('username'); + expect(validationError.errors).toHaveProperty('password'); + } + }); + + it('should handle nested validation errors', () => { + const nestedSchema = z.object({ + user: z.object({ + name: z.string().min(1, 'Name is required'), + email: z.string().email('Invalid email'), + }), + }); + + mockRequest.body = { + user: { + name: '', + email: 'invalid-email', + }, + }; + + const middleware = validate(nestedSchema); + + try { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + fail('Should have thrown ValidationError'); + } catch (error) { + expect(error).toBeInstanceOf(ValidationError); + const validationError = error as ValidationError; + 
expect(validationError.errors).toBeDefined(); + expect(validationError.errors).toHaveProperty('user.name'); + expect(validationError.errors).toHaveProperty('user.email'); + } + }); + + it('should pass non-ZodError to next', () => { + const invalidSchema = {} as z.ZodSchema; + mockRequest.body = {}; + + const middleware = validate(invalidSchema); + + // This should not throw but pass error to next + middleware(mockRequest as Request, mockResponse as Response, mockNext); + + // The error handling depends on implementation, but next should be called + expect(mockNext).toHaveBeenCalled(); + }); + }); + + describe('validateQuery', () => { + const querySchema = z.object({ + page: z.string().optional(), + limit: z.string().optional(), + search: z.string().optional(), + }); + + it('should call next() when query validation passes', () => { + mockRequest.query = { + page: '1', + limit: '10', + search: 'test', + }; + + const middleware = validateQuery(querySchema); + middleware(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + }); + + it('should throw ValidationError when query validation fails', () => { + const strictQuerySchema = z.object({ + page: z.string().min(1, 'Page is required'), + }); + + mockRequest.query = { + page: '', + }; + + const middleware = validateQuery(strictQuerySchema); + + expect(() => { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(ValidationError); + }); + + it('should format query validation errors correctly', () => { + const strictQuerySchema = z.object({ + page: z.string().min(1, 'Page is required'), + }); + + mockRequest.query = { + page: '', + }; + + const middleware = validateQuery(strictQuerySchema); + + try { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + fail('Should have thrown ValidationError'); + } catch (error) { + expect(error).toBeInstanceOf(ValidationError); + const validationError = error as 
ValidationError; + expect(validationError.message).toBe('Query validation failed'); + expect(validationError.errors).toBeDefined(); + } + }); + }); + + describe('validateParams', () => { + const paramsSchema = z.object({ + id: z.string().uuid('Invalid ID format'), + }); + + it('should call next() when params validation passes', () => { + mockRequest.params = { + id: '123e4567-e89b-12d3-a456-426614174000', + }; + + const middleware = validateParams(paramsSchema); + middleware(mockRequest as Request, mockResponse as Response, mockNext); + + expect(mockNext).toHaveBeenCalledTimes(1); + }); + + it('should throw ValidationError when params validation fails', () => { + mockRequest.params = { + id: 'invalid-id', + }; + + const middleware = validateParams(paramsSchema); + + expect(() => { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + }).toThrow(ValidationError); + }); + + it('should format params validation errors correctly', () => { + mockRequest.params = { + id: 'invalid-id', + }; + + const middleware = validateParams(paramsSchema); + + try { + middleware(mockRequest as Request, mockResponse as Response, mockNext); + fail('Should have thrown ValidationError'); + } catch (error) { + expect(error).toBeInstanceOf(ValidationError); + const validationError = error as ValidationError; + expect(validationError.message).toBe('Parameter validation failed'); + expect(validationError.errors).toBeDefined(); + expect(validationError.errors).toHaveProperty('id'); + } + }); + }); +}); diff --git a/tests/unit/utils/errors.test.ts b/tests/unit/utils/errors.test.ts new file mode 100644 index 0000000..acdb5df --- /dev/null +++ b/tests/unit/utils/errors.test.ts @@ -0,0 +1,149 @@ +/** + * Unit Tests for Error Classes + */ + +import { + AppError, + ValidationError, + AuthenticationError, + AuthorizationError, + NotFoundError, + ConflictError, + InternalServerError, +} from '../../../src/utils/errors'; + +describe('Error Classes', () => { + describe('AppError', () 
=> { + it('should create an AppError with default values', () => { + const error = new AppError(400, 'Test error'); + + expect(error).toBeInstanceOf(Error); + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(400); + expect(error.message).toBe('Test error'); + expect(error.isOperational).toBe(true); + expect(error.code).toBeUndefined(); + }); + + it('should create an AppError with custom code', () => { + const error = new AppError(500, 'Server error', false, 'CUSTOM_CODE'); + + expect(error.statusCode).toBe(500); + expect(error.message).toBe('Server error'); + expect(error.isOperational).toBe(false); + expect(error.code).toBe('CUSTOM_CODE'); + }); + }); + + describe('ValidationError', () => { + it('should create a ValidationError with default message', () => { + const error = new ValidationError('Validation failed'); + + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(400); + expect(error.message).toBe('Validation failed'); + expect(error.code).toBe('VALIDATION_ERROR'); + expect(error.name).toBe('ValidationError'); + expect(error.errors).toBeUndefined(); + }); + + it('should create a ValidationError with field errors', () => { + const fieldErrors = { + username: ['Username is required'], + email: ['Email is invalid'], + }; + const error = new ValidationError('Validation failed', fieldErrors); + + expect(error.errors).toEqual(fieldErrors); + }); + }); + + describe('AuthenticationError', () => { + it('should create an AuthenticationError with default message', () => { + const error = new AuthenticationError(); + + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(401); + expect(error.message).toBe('Authentication required'); + expect(error.code).toBe('AUTHENTICATION_ERROR'); + expect(error.name).toBe('AuthenticationError'); + }); + + it('should create an AuthenticationError with custom message', () => { + const error = new AuthenticationError('Invalid credentials'); + + 
expect(error.statusCode).toBe(401); + expect(error.message).toBe('Invalid credentials'); + }); + }); + + describe('AuthorizationError', () => { + it('should create an AuthorizationError with default message', () => { + const error = new AuthorizationError(); + + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(403); + expect(error.message).toBe('Insufficient permissions'); + expect(error.code).toBe('AUTHORIZATION_ERROR'); + expect(error.name).toBe('AuthorizationError'); + }); + + it('should create an AuthorizationError with custom message', () => { + const error = new AuthorizationError('Access denied'); + + expect(error.statusCode).toBe(403); + expect(error.message).toBe('Access denied'); + }); + }); + + describe('NotFoundError', () => { + it('should create a NotFoundError with default message', () => { + const error = new NotFoundError(); + + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(404); + expect(error.message).toBe('Resource not found'); + expect(error.code).toBe('NOT_FOUND'); + expect(error.name).toBe('NotFoundError'); + }); + + it('should create a NotFoundError with resource name', () => { + const error = new NotFoundError('User'); + + expect(error.statusCode).toBe(404); + expect(error.message).toBe('User not found'); + }); + }); + + describe('ConflictError', () => { + it('should create a ConflictError', () => { + const error = new ConflictError('Resource already exists'); + + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(409); + expect(error.message).toBe('Resource already exists'); + expect(error.code).toBe('CONFLICT'); + expect(error.name).toBe('ConflictError'); + }); + }); + + describe('InternalServerError', () => { + it('should create an InternalServerError with default message', () => { + const error = new InternalServerError(); + + expect(error).toBeInstanceOf(AppError); + expect(error.statusCode).toBe(500); + expect(error.message).toBe('Internal server error'); + 
+ expect(error.code).toBe('INTERNAL_ERROR'); + expect(error.name).toBe('InternalServerError'); + expect(error.isOperational).toBe(false); + }); + + it('should create an InternalServerError with custom message', () => { + const error = new InternalServerError('Database connection failed'); + + expect(error.statusCode).toBe(500); + expect(error.message).toBe('Database connection failed'); + }); + }); +}); diff --git a/tests/utils/mocks.ts b/tests/utils/mocks.ts new file mode 100644 index 0000000..47ba89e --- /dev/null +++ b/tests/utils/mocks.ts @@ -0,0 +1,67 @@ +/** + * Test Utilities and Mocks + * + * Common mocks and utilities for testing + */ + +import { Request, Response, NextFunction } from 'express'; +import { Session } from 'express-session'; + +/** + * Creates a mock Express Request object + */ +export const createMockRequest = (overrides?: Partial<Request>): Partial<Request> => { + return { + body: {}, + params: {}, + query: {}, + session: undefined, + ...overrides, + } as Partial<Request>; +}; + +/** + * Creates a mock Express Response object + */ +export const createMockResponse = (): Partial<Response> => { + const res: Partial<Response> = { + status: jest.fn().mockReturnThis(), + json: jest.fn().mockReturnThis(), + send: jest.fn().mockReturnThis(), + clearCookie: jest.fn().mockReturnThis(), + }; + return res; +}; + +/** + * Creates a mock Express NextFunction + */ +export const createMockNext = (): NextFunction => { + return jest.fn(); +}; + +/** + * Creates a mock session with user data + */ +export const createMockSession = (user?: { + id: string; + username: string; + fullName?: string; // optional: middleware tests supply only id/username/role + email?: string; // optional for the same reason + role: string; + libraryId?: string; +}): any => { + const defaultUser = { + id: 'test-user-id', + username: 'testuser', + fullName: 'Test User', + email: 'test@example.com', + role: 'library_admin', + libraryId: 'test-library-id', + }; + + return { + user: user || defaultUser, + destroy: jest.fn((callback: (err?: Error) => void) => callback(undefined)), + }; +};