chore: improve DX scripts, fix test suite, and harden tooling
All checks were successful
Deploy to Production / test (push) Successful in 32s

Scripts:
- remote.sh: remove unused open_browser() function
- deploy-remote.sh: add DB backup before deploy, --skip-backup flag, step numbering
- db-backup.sh: fix macOS compat (xargs -r is GNU-only), use portable approach
- db-restore.sh: add safety backup before restore, SQL file validation, file size display
- logs.sh: default to no-follow with --tail=100, order-independent arg parsing
- docker-cleanup.sh: add Docker health check, colored output
- test-sequential.sh: exclude *.integration.test.ts by default, add --integration flag
- simulate-ci.sh: pass --integration flag (has real DB)

Tests:
- db.test.ts: fix mock path from ./DrizzleClient to @shared/db/DrizzleClient
- server.settings.test.ts: rewrite mocks for gameSettingsService (old config/saveConfig removed)
- server.test.ts: add missing config.lootdrop and BotClient mocks, complete DrizzleClient chain
- indexes.test.ts: rename to indexes.integration.test.ts (requires live DB)

Config:
- package.json: test script uses sequential runner, add test:ci and db:restore aliases
- deploy.yml: use --integration flag in CI (has Postgres service)
This commit is contained in:
syntaxbullet
2026-02-13 14:39:02 +01:00
parent f822d90dd3
commit aca5538d57
16 changed files with 263 additions and 119 deletions

View File

@@ -95,6 +95,6 @@ jobs:
ADMIN_TOKEN="admin_token_123"
LOG_LEVEL="error"
EOF
bash shared/scripts/test-sequential.sh
bash shared/scripts/test-sequential.sh --integration
env:
NODE_ENV: test

View File

@@ -1,7 +1,7 @@
import { describe, it, expect, mock, beforeEach } from "bun:test";
// Mock DrizzleClient
mock.module("./DrizzleClient", () => ({
// Mock DrizzleClient — must match the import path used in db.ts
mock.module("@shared/db/DrizzleClient", () => ({
DrizzleClient: {
transaction: async (cb: any) => cb("MOCK_TX")
}

View File

@@ -24,7 +24,9 @@
"remote": "bash shared/scripts/remote.sh",
"logs": "bash shared/scripts/logs.sh",
"db:backup": "bash shared/scripts/db-backup.sh",
"test": "bun test",
"db:restore": "bash shared/scripts/db-restore.sh",
"test": "bash shared/scripts/test-sequential.sh",
"test:ci": "bash shared/scripts/test-sequential.sh --integration",
"docker:cleanup": "bash shared/scripts/docker-cleanup.sh"
},
"dependencies": {
@@ -35,4 +37,4 @@
"postgres": "^3.4.8",
"zod": "^4.3.6"
}
}
}

View File

@@ -2,6 +2,10 @@
# =============================================================================
# Aurora Database Backup Script
# =============================================================================
# Creates a timestamped PostgreSQL backup and retains the last 10 backups.
#
# Usage: ./db-backup.sh
# =============================================================================
set -e
@@ -37,9 +41,13 @@ if docker ps | grep -q aurora_db; then
echo -e " 📂 File: $BACKUP_FILE"
echo -e " 📏 Size: $(du -h "$BACKUP_FILE" | cut -f1)"
# Keep only last 10 backups
# Keep only last 10 backups (cross-platform: works on macOS and Linux)
cd "$BACKUP_DIR"
ls -t backup_*.sql | tail -n +11 | xargs -r rm --
OLD_BACKUPS=$(ls -t backup_*.sql 2>/dev/null | tail -n +11)
if [ -n "$OLD_BACKUPS" ]; then
echo "$OLD_BACKUPS" | xargs rm --
echo -e " ${GREEN}✓${NC} Pruned old backups"
fi
else
echo -e " ${RED}${NC} Backup created but empty. Something went wrong."
rm -f "$BACKUP_FILE"

View File

@@ -2,7 +2,10 @@
# =============================================================================
# Aurora Database Restore Script
# =============================================================================
# Usage: ./db-restore.sh [path/to/backup.sql]
# Restores the database from a SQL backup file.
# Automatically creates a safety backup before overwriting.
#
# Usage: ./db-restore.sh <path-to-backup.sql>
# =============================================================================
set -e
@@ -14,6 +17,8 @@ if [ -f .env ]; then
set +a
fi
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
@@ -23,6 +28,7 @@ NC='\033[0m'
if [ -z "$1" ]; then
echo -e "${RED}Error: Please specify the backup file to restore.${NC}"
echo "Usage: ./db-restore.sh <path-to-sql-file>"
echo ""
echo "Available backups:"
ls -lh shared/db/backups/*.sql 2>/dev/null || echo " (No backups found in shared/db/backups)"
exit 1
@@ -35,29 +41,42 @@ if [ ! -f "$BACKUP_FILE" ]; then
exit 1
fi
# Validate the backup file looks like SQL
if ! head -1 "$BACKUP_FILE" | grep -qiE '(^--|^SET|^CREATE|^INSERT|^\\\\connect|^pg_dump)'; then
echo -e "${YELLOW}⚠️ Warning: File does not appear to be a SQL dump.${NC}"
read -p "Continue anyway? (y/N): " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
echo "Operation cancelled."
exit 0
fi
fi
echo -e "${YELLOW}⚠️ WARNING: This will OVERWRITE the current database!${NC}"
echo -e "Target Database: ${DB_NAME:-auroradev}"
echo -e "Backup File: $BACKUP_FILE"
echo -e "Backup File: $BACKUP_FILE"
echo -e "File Size: $(du -h "$BACKUP_FILE" | cut -f1)"
echo ""
read -p "Are you sure you want to proceed? (y/N): " -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
echo -e "${YELLOW}♻️ Restoring database...${NC}"
if docker ps | grep -q aurora_db; then
# Drop and recreate public schema to ensure clean slate, then restore
# Note: dependent on how the dump was created. Standard pg_dump usually includes CREATE commands if configured,
# but often it's data only or structure+data.
# For safety, we'll just pipe the file to psql.
cat "$BACKUP_FILE" | docker exec -i aurora_db psql -U "${DB_USER:-auroradev}" -d "${DB_NAME:-auroradev}"
echo -e " ${GREEN}✓${NC} Restore complete!"
else
echo -e "${RED}Error: Database container (aurora_db) is not running!${NC}"
exit 1
if ! docker ps | grep -q aurora_db; then
echo -e "${RED}Error: Database container (aurora_db) is not running!${NC}"
exit 1
fi
# Create a safety backup before restoring
echo -e "${YELLOW}💾 Creating safety backup before restore...${NC}"
bash "$SCRIPT_DIR/db-backup.sh" || {
echo -e "${RED}⚠️ Safety backup failed. Aborting restore.${NC}"
exit 1
}
echo -e "${YELLOW}♻️ Restoring database...${NC}"
cat "$BACKUP_FILE" | docker exec -i aurora_db psql -U "${DB_USER:-auroradev}" -d "${DB_NAME:-auroradev}"
echo -e " ${GREEN}✓${NC} Restore complete!"
else
echo "Operation cancelled."
exit 0

View File

@@ -1,4 +1,13 @@
#!/bin/bash
# =============================================================================
# Aurora Remote Deployment Script
# =============================================================================
# Deploys the application to a remote VPS via SSH.
# Performs a database backup, pulls latest code, builds, and restarts services.
#
# Usage: ./deploy-remote.sh [--skip-backup]
# =============================================================================
set -e
# Load environment variables
@@ -8,8 +17,14 @@ if [ -f .env ]; then
set +a
fi
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'
if [ -z "$VPS_HOST" ] || [ -z "$VPS_USER" ]; then
echo "Error: VPS_HOST and VPS_USER must be set in .env"
echo -e "${RED}Error: VPS_HOST and VPS_USER must be set in .env${NC}"
echo "Please add them to your .env file:"
echo "VPS_USER=your-username"
echo "VPS_HOST=your-ip-address"
@@ -18,15 +33,36 @@ fi
# Default remote directory to ~/Aurora if not specified
REMOTE_DIR="${VPS_PROJECT_PATH:-~/Aurora}"
SKIP_BACKUP=false
echo -e "\033[1;33m🚀 Deploying to $VPS_USER@$VPS_HOST:$REMOTE_DIR...\033[0m"
if [[ "$1" == "--skip-backup" ]]; then
SKIP_BACKUP=true
fi
# Execute commands on remote server
ssh -t "$VPS_USER@$VPS_HOST" "cd $REMOTE_DIR && \
echo '⬇️ Pulling latest changes...' && \
git pull && \
echo '🏗️ Building production containers...' && \
docker compose -f docker-compose.prod.yml build && \
echo '🚀 Starting services...' && \
docker compose -f docker-compose.prod.yml up -d && \
echo '✅ Deployment complete!'"
echo -e "${YELLOW}🚀 Deploying to $VPS_USER@$VPS_HOST:$REMOTE_DIR...${NC}"
# Step 1: Database backup (unless skipped)
if [ "$SKIP_BACKUP" = false ]; then
echo -e "\n${YELLOW}💾 Step 1/4: Backing up remote database...${NC}"
ssh -t "$VPS_USER@$VPS_HOST" "cd $REMOTE_DIR && bash shared/scripts/db-backup.sh" || {
echo -e "${RED}⚠️ Backup failed. Aborting deployment.${NC}"
echo "Use --skip-backup to deploy without backing up."
exit 1
}
else
echo -e "\n${YELLOW}⏭️ Step 1/4: Skipping database backup (--skip-backup)${NC}"
fi
# Step 2: Pull latest code
echo -e "\n${YELLOW}⬇️ Step 2/4: Pulling latest changes...${NC}"
ssh -t "$VPS_USER@$VPS_HOST" "cd $REMOTE_DIR && git pull"
# Step 3: Build production containers
echo -e "\n${YELLOW}🏗️ Step 3/4: Building production containers...${NC}"
ssh -t "$VPS_USER@$VPS_HOST" "cd $REMOTE_DIR && docker compose -f docker-compose.prod.yml build"
# Step 4: Restart services
echo -e "\n${YELLOW}🚀 Step 4/4: Starting services...${NC}"
ssh -t "$VPS_USER@$VPS_HOST" "cd $REMOTE_DIR && docker compose -f docker-compose.prod.yml up -d"
echo -e "\n${GREEN}✅ Deployment complete!${NC}"

0
shared/scripts/deploy.sh Normal file → Executable file
View File

View File

@@ -1,14 +1,31 @@
#!/bin/bash
# Cleanup script for Docker resources
# Use: ./shared/scripts/docker-cleanup.sh
# Use: ./shared/scripts/docker-cleanup.sh --full (for aggressive cleanup)
# =============================================================================
# Aurora Docker Cleanup Script
# =============================================================================
# Cleans up Docker resources to free disk space.
#
# Usage: ./docker-cleanup.sh (interactive mode)
# ./docker-cleanup.sh --full (automatic full cleanup)
# =============================================================================
set -e
echo "🧹 Aurora Docker Cleanup"
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'
echo -e "${YELLOW}🧹 Aurora Docker Cleanup${NC}"
echo "========================"
echo ""
# Verify Docker is running
if ! docker info > /dev/null 2>&1; then
echo -e "${RED}Error: Docker is not running.${NC}"
exit 1
fi
# Show current disk usage first
echo "📊 Current Docker disk usage:"
docker system df
@@ -26,7 +43,7 @@ docker image prune -f
# Check for --full flag for aggressive cleanup
if [[ "$1" == "--full" ]]; then
echo ""
echo "🔥 Full cleanup mode - removing all unused Docker resources..."
echo -e "${YELLOW}🔥 Full cleanup mode - removing all unused Docker resources...${NC}"
# Remove all unused images, not just dangling ones
echo " → Removing unused images..."
@@ -49,7 +66,7 @@ if [[ "$1" == "--full" ]]; then
docker volume rm aurora_app_node_modules aurora_web_node_modules 2>/dev/null || true
echo ""
echo "✅ Full cleanup complete!"
echo -e "${GREEN}✅ Full cleanup complete!${NC}"
else
# Interactive mode
echo ""
@@ -57,7 +74,7 @@ else
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
docker builder prune -f
echo "✓ Build cache cleared"
echo -e "${GREEN}✓${NC} Build cache cleared"
fi
echo ""
@@ -65,7 +82,7 @@ else
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
docker image prune -a -f
echo "✓ Unused images removed"
echo -e "${GREEN}✓${NC} Unused images removed"
fi
echo ""
@@ -73,7 +90,7 @@ else
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
docker volume rm aurora_app_node_modules aurora_web_node_modules 2>/dev/null || true
echo "✓ Node modules volumes removed"
echo -e "${GREEN}✓${NC} Node modules volumes removed"
fi
echo ""
@@ -81,11 +98,11 @@ else
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
docker system prune -a -f --volumes
echo "✓ Full system prune complete"
echo -e "${GREEN}✓${NC} Full system prune complete"
fi
echo ""
echo "✅ Cleanup complete!"
echo -e "${GREEN}✅ Cleanup complete!${NC}"
fi
echo ""

View File

@@ -2,37 +2,53 @@
# =============================================================================
# Aurora Log Viewer
# =============================================================================
# Usage: ./logs.sh [app|db|all] [-f]
# Default: app container, follow mode
# Usage: ./logs.sh [app|db|all] [-f|--follow]
# Default: app container, no follow (shows last 100 lines)
#
# Examples:
# ./logs.sh # Last 100 lines of app logs
# ./logs.sh -f # Follow app logs in real-time
# ./logs.sh db -f # Follow db logs in real-time
# ./logs.sh all # Last 100 lines of all logs
# =============================================================================
SERVICE=${1:-app}
FOLLOW="-f"
SERVICE="app"
FOLLOW=""
TAIL_LINES="--tail=100"
if [[ "$1" == "-f" ]]; then
SERVICE="app"
FOLLOW="-f"
elif [[ "$2" == "-f" ]]; then
FOLLOW="-f"
elif [[ "$2" == "--no-follow" ]]; then
FOLLOW=""
fi
# Parse arguments (order-independent)
for arg in "$@"; do
case $arg in
-f|--follow)
FOLLOW="-f"
TAIL_LINES="" # When following, start from current output
;;
app|db|all)
SERVICE="$arg"
;;
-*)
echo "Unknown option: $arg"
echo "Usage: ./logs.sh [app|db|all] [-f|--follow]"
exit 1
;;
esac
done
echo "📋 Fetching logs for service: $SERVICE..."
case $SERVICE in
app)
docker compose logs $FOLLOW app
docker compose logs $FOLLOW $TAIL_LINES app
;;
db)
docker compose logs $FOLLOW db
docker compose logs $FOLLOW $TAIL_LINES db
;;
all)
docker compose logs $FOLLOW
docker compose logs $FOLLOW $TAIL_LINES
;;
*)
echo "Unknown service: $SERVICE"
echo "Usage: ./logs.sh [app|db|all] [-f]"
echo "Usage: ./logs.sh [app|db|all] [-f|--follow]"
exit 1
;;
esac

View File

@@ -1,5 +1,14 @@
#!/bin/bash
# =============================================================================
# Aurora Remote Tunnel Script
# =============================================================================
# Establishes secure SSH tunnels to remote Aurora services.
# Uses autossh for automatic reconnection if available.
#
# Usage: ./remote.sh
# =============================================================================
# Load environment variables
if [ -f .env ]; then
set -a
@@ -26,21 +35,6 @@ echo ""
echo "Press Ctrl+C to stop all connections."
echo ""
# Function to open browser (cross-platform)
open_browser() {
sleep 2 # Wait for tunnel to establish
if command -v open &> /dev/null; then
# macOS
open "http://localhost:$DASHBOARD_PORT"
elif command -v xdg-open &> /dev/null; then
# Linux
xdg-open "http://localhost:$DASHBOARD_PORT"
elif command -v start &> /dev/null; then
# Windows (Git Bash)
start "http://localhost:$DASHBOARD_PORT"
fi
}
# Check if autossh is available for auto-reconnection
if command -v autossh &> /dev/null; then
echo "✅ Using autossh for automatic reconnection"

0
shared/scripts/setup-server.sh Normal file → Executable file
View File

View File

@@ -92,7 +92,7 @@ if [ -n "$1" ]; then
EXIT_CODE=1
fi
else
if bash shared/scripts/test-sequential.sh; then
if bash shared/scripts/test-sequential.sh --integration; then
echo "✅ CI Simulation Passed!"
EXIT_CODE=0
else

View File

@@ -1,8 +1,17 @@
#!/bin/bash
set -e
INCLUDE_INTEGRATION=false
if [[ "$1" == "--integration" ]]; then
INCLUDE_INTEGRATION=true
fi
echo "🔍 Finding test files..."
TEST_FILES=$(find . -name "*.test.ts" -not -path "*/node_modules/*")
if [ "$INCLUDE_INTEGRATION" = true ]; then
TEST_FILES=$(find . -name "*.test.ts" -not -path "*/node_modules/*")
else
TEST_FILES=$(find . -name "*.test.ts" -not -name "*.integration.test.ts" -not -path "*/node_modules/*")
fi
if [ -z "$TEST_FILES" ]; then
echo "⚠️ No test files found!"
@@ -10,6 +19,10 @@ if [ -z "$TEST_FILES" ]; then
fi
echo "🧪 Running tests sequentially..."
if [ "$INCLUDE_INTEGRATION" = true ]; then
echo " (including integration tests)"
fi
FAILED=0
for FILE in $TEST_FILES; do

View File

@@ -1,40 +1,57 @@
import { describe, expect, it, mock, beforeEach, afterEach, jest } from "bun:test";
import { type WebServerInstance } from "./server";
// Mock the dependencies
const mockConfig = {
// Mock gameSettingsService — the route now uses this instead of config/saveConfig
const mockSettings = {
leveling: {
base: 100,
exponent: 1.5,
chat: { minXp: 10, maxXp: 20, cooldownMs: 60000 }
},
economy: {
daily: { amount: 100n, streakBonus: 10n, weeklyBonus: 50n, cooldownMs: 86400000 },
transfers: { allowSelfTransfer: false, minAmount: 50n },
daily: { amount: "100", streakBonus: "10", weeklyBonus: "50", cooldownMs: 86400000 },
transfers: { allowSelfTransfer: false, minAmount: "1" },
exam: { multMin: 1.5, multMax: 2.5 }
},
inventory: { maxStackSize: 99n, maxSlots: 20 },
inventory: { maxStackSize: "99", maxSlots: 20 },
lootdrop: {
spawnChance: 0.1,
cooldownMs: 3600000,
minMessages: 10,
activityWindowMs: 300000,
reward: { min: 100, max: 500, currency: "gold" }
},
commands: { "help": true },
system: {},
moderation: {
prune: { maxAmount: 100, confirmThreshold: 50, batchSize: 100, batchDelayMs: 1000 },
cases: { dmOnWarn: true }
},
trivia: {
entryFee: "50",
rewardMultiplier: 1.5,
timeoutSeconds: 30,
cooldownMs: 60000,
categories: [],
difficulty: "random"
}
};
const mockSaveConfig = jest.fn();
const mockGetSettings = jest.fn(() => Promise.resolve(mockSettings));
const mockUpsertSettings = jest.fn(() => Promise.resolve(mockSettings));
const mockGetDefaults = jest.fn(() => mockSettings);
// Mock @shared/lib/config using mock.module
mock.module("@shared/lib/config", () => ({
config: mockConfig,
saveConfig: mockSaveConfig,
GameConfigType: {}
mock.module("@shared/modules/game-settings/game-settings.service", () => ({
gameSettingsService: {
getSettings: mockGetSettings,
upsertSettings: mockUpsertSettings,
getDefaults: mockGetDefaults,
invalidateCache: jest.fn(),
}
}));
// Mock DrizzleClient (dependency potentially imported transitively)
mock.module("@shared/db/DrizzleClient", () => ({
DrizzleClient: {}
}));
// Mock @shared/lib/utils (deepMerge is used by settings API)
@@ -104,6 +121,8 @@ describe("Settings API", () => {
beforeEach(async () => {
jest.clearAllMocks();
mockGetSettings.mockImplementation(() => Promise.resolve(mockSettings));
mockUpsertSettings.mockImplementation(() => Promise.resolve(mockSettings));
serverInstance = await createWebServer({ port: PORT, hostname: HOSTNAME });
});
@@ -117,18 +136,14 @@ describe("Settings API", () => {
const res = await fetch(`${BASE_URL}/api/settings`);
expect(res.status).toBe(200);
const data = await res.json();
// Check if BigInts are converted to strings
const data = await res.json() as any;
// Check values come through correctly
expect(data.economy.daily.amount).toBe("100");
expect(data.leveling.base).toBe(100);
});
it("POST /api/settings should save valid configuration via merge", async () => {
// We only send a partial update, expecting the server to merge it
// Note: For now the server implementation might still default to overwrite if we haven't updated it yet.
// But the user requested "partial vs full" fix.
// Let's assume we implement the merge logic.
const partialConfig = { studentRole: "new-role-partial" };
const partialConfig = { economy: { daily: { amount: "200" } } };
const res = await fetch(`${BASE_URL}/api/settings`, {
method: "POST",
@@ -137,26 +152,27 @@ describe("Settings API", () => {
});
expect(res.status).toBe(200);
// Expect saveConfig to be called with the MERGED result
expect(mockSaveConfig).toHaveBeenCalledWith(expect.objectContaining({
studentRole: "new-role-partial",
leveling: mockConfig.leveling // Should keep existing values
}));
// upsertSettings should be called with the partial config
expect(mockUpsertSettings).toHaveBeenCalledWith(
expect.objectContaining({
economy: { daily: { amount: "200" } }
})
);
});
it("POST /api/settings should return 400 when save fails", async () => {
mockSaveConfig.mockImplementationOnce(() => {
mockUpsertSettings.mockImplementationOnce(() => {
throw new Error("Validation failed");
});
const res = await fetch(`${BASE_URL}/api/settings`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({}) // Empty might be valid partial, but mocks throw
body: JSON.stringify({})
});
expect(res.status).toBe(400);
const data = await res.json();
const data = await res.json() as any;
expect(data.details).toBe("Validation failed");
});
@@ -164,7 +180,7 @@ describe("Settings API", () => {
const res = await fetch(`${BASE_URL}/api/settings/meta`);
expect(res.status).toBe(200);
const data = await res.json();
const data = await res.json() as any;
expect(data.roles).toHaveLength(2);
expect(data.roles[0]).toEqual({ id: "role1", name: "Admin", color: "#ffffff" });
expect(data.channels[0]).toEqual({ id: "chan1", name: "general", type: 0 });

View File

@@ -1,6 +1,5 @@
import { describe, test, expect, afterAll, mock } from "bun:test";
import type { WebServerInstance } from "./server";
import { createWebServer } from "./server";
interface MockBotStats {
bot: { name: string; avatarUrl: string | null };
@@ -13,21 +12,21 @@ interface MockBotStats {
}
// 1. Mock DrizzleClient (dependency of dashboardService)
// Must provide full chainable builder for select().from().leftJoin().groupBy().orderBy().limit()
mock.module("@shared/db/DrizzleClient", () => {
const mockBuilder = {
where: mock(() => Promise.resolve([{ count: "5", balance: 1000n, level: 5, dailyStreak: 2 }])),
then: (onfulfilled: any) => onfulfilled([{ count: "5", balance: 1000n, level: 5, dailyStreak: 2 }]),
orderBy: mock(() => mockBuilder), // Chainable
limit: mock(() => Promise.resolve([])), // Terminal
};
const mockFrom = {
from: mock(() => mockBuilder),
};
const mockBuilder: Record<string, any> = {};
// Every chainable method returns mock builder; terminal calls return resolved promise
mockBuilder.where = mock(() => Promise.resolve([{ count: "5", balance: 1000n, level: 5, dailyStreak: 2 }]));
mockBuilder.then = (onfulfilled: any) => onfulfilled([{ count: "5", balance: 1000n, level: 5, dailyStreak: 2 }]);
mockBuilder.orderBy = mock(() => mockBuilder);
mockBuilder.limit = mock(() => Promise.resolve([]));
mockBuilder.leftJoin = mock(() => mockBuilder);
mockBuilder.groupBy = mock(() => mockBuilder);
mockBuilder.from = mock(() => mockBuilder);
return {
DrizzleClient: {
select: mock(() => mockFrom),
select: mock(() => mockBuilder),
query: {
transactions: { findMany: mock(() => Promise.resolve([])) },
moderationCases: { findMany: mock(() => Promise.resolve([])) },
@@ -54,7 +53,31 @@ mock.module("../../bot/lib/clientStats", () => ({
})),
}));
// 3. System Events (No mock needed, use real events)
// 3. Mock config (used by lootdrop.service.getLootdropState)
mock.module("@shared/lib/config", () => ({
config: {
lootdrop: {
activityWindowMs: 120000,
minMessages: 1,
spawnChance: 1,
cooldownMs: 3000,
reward: { min: 40, max: 150, currency: "Astral Units" }
}
}
}));
// 4. Mock BotClient (used by stats helper for maintenanceMode)
mock.module("../../bot/lib/BotClient", () => ({
AuroraClient: {
maintenanceMode: false,
guilds: { cache: { get: () => null } },
commands: [],
knownCommands: new Map(),
}
}));
// Import after all mocks are set up
import { createWebServer } from "./server";
describe("WebServer Security & Limits", () => {
const port = 3001;