forked from syntaxbullet/aurorabot
Compare commits
26 Commits
afe82c449b
...
e252d6e00a
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e252d6e00a | ||
|
|
95f1b4e04a | ||
|
|
62c6ca5e87 | ||
|
|
aac9be19f2 | ||
|
|
bb823c86c1 | ||
|
|
119301f1c3 | ||
|
|
9a2fc101da | ||
|
|
7049cbfd9d | ||
|
|
db859e8f12 | ||
|
|
5ff3fa9ab5 | ||
|
|
c8bf69a969 | ||
|
|
fee4969910 | ||
|
|
dabcb4cab3 | ||
|
|
1a3f5c6654 | ||
|
|
422db6479b | ||
|
|
35ecea16f7 | ||
|
|
9ff679ee5c | ||
|
|
ebefd8c0df | ||
|
|
73531f38ae | ||
|
|
5a6356d271 | ||
|
|
f9dafeac3b | ||
|
|
1a2bbb011c | ||
|
|
2ead35789d | ||
|
|
c1da71227d | ||
|
|
17e636c4e5 | ||
|
|
d7543d9f48 |
39
.dockerignore
Normal file
39
.dockerignore
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# Dependencies - handled inside container
|
||||||
|
node_modules
|
||||||
|
web/node_modules
|
||||||
|
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
|
||||||
|
# Logs and data
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
shared/db/data
|
||||||
|
shared/db/log
|
||||||
|
|
||||||
|
# Development tools
|
||||||
|
.env
|
||||||
|
.env.example
|
||||||
|
.opencode
|
||||||
|
.agent
|
||||||
|
|
||||||
|
# Documentation
|
||||||
|
docs
|
||||||
|
*.md
|
||||||
|
!README.md
|
||||||
|
|
||||||
|
# IDE
|
||||||
|
.vscode
|
||||||
|
.idea
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
|
||||||
|
# OS
|
||||||
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# Build artifacts
|
||||||
|
dist
|
||||||
|
.cache
|
||||||
|
*.tsbuildinfo
|
||||||
18
.env.example
18
.env.example
@@ -1,12 +1,26 @@
|
|||||||
|
# =============================================================================
|
||||||
|
# Aurora Environment Configuration
|
||||||
|
# =============================================================================
|
||||||
|
# Copy this file to .env and update with your values
|
||||||
|
# For production, see .env.prod.example with security recommendations
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Database
|
||||||
|
# For production: use a strong password (openssl rand -base64 32)
|
||||||
DB_USER=aurora
|
DB_USER=aurora
|
||||||
DB_PASSWORD=aurora
|
DB_PASSWORD=aurora
|
||||||
DB_NAME=aurora
|
DB_NAME=aurora
|
||||||
DB_PORT=5432
|
DB_PORT=5432
|
||||||
DB_HOST=db
|
DB_HOST=db
|
||||||
|
DATABASE_URL=postgres://aurora:aurora@db:5432/aurora
|
||||||
|
|
||||||
|
# Discord
|
||||||
|
# Get from: https://discord.com/developers/applications
|
||||||
DISCORD_BOT_TOKEN=your-discord-bot-token
|
DISCORD_BOT_TOKEN=your-discord-bot-token
|
||||||
DISCORD_CLIENT_ID=your-discord-client-id
|
DISCORD_CLIENT_ID=your-discord-client-id
|
||||||
DISCORD_GUILD_ID=your-discord-guild-id
|
DISCORD_GUILD_ID=your-discord-guild-id
|
||||||
DATABASE_URL=postgres://aurora:aurora@db:5432/aurora
|
|
||||||
|
|
||||||
VPS_USER=your-vps-user
|
# Server (for remote access scripts)
|
||||||
|
# Use a non-root user (see shared/scripts/setup-server.sh)
|
||||||
|
VPS_USER=deploy
|
||||||
VPS_HOST=your-vps-ip
|
VPS_HOST=your-vps-ip
|
||||||
|
|||||||
38
.env.prod.example
Normal file
38
.env.prod.example
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# =============================================================================
|
||||||
|
# Aurora Production Environment Template
|
||||||
|
# =============================================================================
|
||||||
|
# Copy this file to .env and fill in the values
|
||||||
|
# IMPORTANT: Use strong, unique passwords in production!
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Database Configuration
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Generate strong password: openssl rand -base64 32
|
||||||
|
DB_USER=aurora_prod
|
||||||
|
DB_PASSWORD=CHANGE_ME_USE_STRONG_PASSWORD
|
||||||
|
DB_NAME=aurora_prod
|
||||||
|
DB_PORT=5432
|
||||||
|
DB_HOST=localhost
|
||||||
|
|
||||||
|
# Constructed database URL (used by Drizzle)
|
||||||
|
DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@localhost:${DB_PORT}/${DB_NAME}
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Discord Configuration
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Get these from Discord Developer Portal: https://discord.com/developers
|
||||||
|
DISCORD_BOT_TOKEN=your_bot_token_here
|
||||||
|
DISCORD_CLIENT_ID=your_client_id_here
|
||||||
|
DISCORD_GUILD_ID=your_guild_id_here
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Server Configuration (for SSH deployment scripts)
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Use a non-root user for security!
|
||||||
|
VPS_USER=deploy
|
||||||
|
VPS_HOST=your_server_ip_here
|
||||||
|
|
||||||
|
# Optional: Custom ports for remote access
|
||||||
|
# DASHBOARD_PORT=3000
|
||||||
|
# STUDIO_PORT=4983
|
||||||
6
.env.test
Normal file
6
.env.test
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
DATABASE_URL="postgresql://auroradev:auroradev123@localhost:5432/aurora_test"
|
||||||
|
DISCORD_BOT_TOKEN="test_token"
|
||||||
|
DISCORD_CLIENT_ID="123456789"
|
||||||
|
DISCORD_GUILD_ID="123456789"
|
||||||
|
ADMIN_TOKEN="admin_token_123"
|
||||||
|
LOG_LEVEL="error"
|
||||||
204
.github/workflows/deploy.yml
vendored
Normal file
204
.github/workflows/deploy.yml
vendored
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
# Aurora CI/CD Pipeline
|
||||||
|
# Builds, tests, and deploys to production server
|
||||||
|
|
||||||
|
name: Deploy to Production
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
workflow_dispatch: # Allow manual trigger
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY: ghcr.io
|
||||||
|
IMAGE_NAME: ${{ github.repository }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# ==========================================================================
|
||||||
|
# Test Job
|
||||||
|
# ==========================================================================
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
POSTGRES_DB: aurora_test
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Bun
|
||||||
|
uses: oven-sh/setup-bun@v2
|
||||||
|
with:
|
||||||
|
bun-version: latest
|
||||||
|
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
bun install --frozen-lockfile
|
||||||
|
cd web && bun install --frozen-lockfile
|
||||||
|
|
||||||
|
- name: Create Config File
|
||||||
|
run: |
|
||||||
|
mkdir -p shared/config
|
||||||
|
cat <<EOF > shared/config/config.json
|
||||||
|
{
|
||||||
|
"leveling": { "base": 100, "exponent": 2.5, "chat": { "cooldownMs": 60000, "minXp": 15, "maxXp": 25 } },
|
||||||
|
"economy": {
|
||||||
|
"daily": { "amount": "100", "streakBonus": "10", "weeklyBonus": "50", "cooldownMs": 86400000 },
|
||||||
|
"transfers": { "allowSelfTransfer": false, "minAmount": "1" },
|
||||||
|
"exam": { "multMin": 0.05, "multMax": 0.03 }
|
||||||
|
},
|
||||||
|
"inventory": { "maxStackSize": "99", "maxSlots": 50 },
|
||||||
|
"commands": {},
|
||||||
|
"lootdrop": {
|
||||||
|
"activityWindowMs": 120000, "minMessages": 1, "spawnChance": 1, "cooldownMs": 3000,
|
||||||
|
"reward": { "min": 40, "max": 150, "currency": "Astral Units" }
|
||||||
|
},
|
||||||
|
"studentRole": "123", "visitorRole": "456", "colorRoles": [],
|
||||||
|
"moderation": {
|
||||||
|
"prune": { "maxAmount": 100, "confirmThreshold": 50, "batchSize": 100, "batchDelayMs": 1000 },
|
||||||
|
"cases": { "dmOnWarn": false }
|
||||||
|
},
|
||||||
|
"trivia": {
|
||||||
|
"entryFee": "50", "rewardMultiplier": 1.5, "timeoutSeconds": 30, "cooldownMs": 60000,
|
||||||
|
"categories": [], "difficulty": "random"
|
||||||
|
},
|
||||||
|
"system": {}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Setup Test Database
|
||||||
|
run: bun run db:push:local
|
||||||
|
env:
|
||||||
|
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/aurora_test
|
||||||
|
# Create .env.test for implicit usage by bun
|
||||||
|
DISCORD_BOT_TOKEN: test_token
|
||||||
|
DISCORD_CLIENT_ID: 123
|
||||||
|
DISCORD_GUILD_ID: 123
|
||||||
|
|
||||||
|
- name: Run Tests
|
||||||
|
run: |
|
||||||
|
# Create .env.test for test-sequential.sh / bun test
|
||||||
|
cat <<EOF > .env.test
|
||||||
|
DATABASE_URL="postgresql://postgres:postgres@postgres:5432/aurora_test"
|
||||||
|
DISCORD_BOT_TOKEN="test_token"
|
||||||
|
DISCORD_CLIENT_ID="123456789"
|
||||||
|
DISCORD_GUILD_ID="123456789"
|
||||||
|
ADMIN_TOKEN="admin_token_123"
|
||||||
|
LOG_LEVEL="error"
|
||||||
|
EOF
|
||||||
|
bash shared/scripts/test-sequential.sh
|
||||||
|
env:
|
||||||
|
NODE_ENV: test
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# Build Job
|
||||||
|
# ==========================================================================
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: test
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
outputs:
|
||||||
|
image_tag: ${{ steps.meta.outputs.tags }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
tags: |
|
||||||
|
type=sha,prefix=
|
||||||
|
type=raw,value=latest
|
||||||
|
|
||||||
|
- name: Build and Push Docker Image
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile.prod
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
|
|
||||||
|
# ==========================================================================
|
||||||
|
# Deploy Job
|
||||||
|
# ==========================================================================
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
environment: production
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Deploy to Production Server
|
||||||
|
uses: appleboy/ssh-action@v1.0.3
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.VPS_HOST }}
|
||||||
|
username: ${{ secrets.VPS_USER }}
|
||||||
|
key: ${{ secrets.SSH_PRIVATE_KEY }}
|
||||||
|
script: |
|
||||||
|
cd ~/Aurora
|
||||||
|
|
||||||
|
# Pull latest code
|
||||||
|
git pull origin main
|
||||||
|
|
||||||
|
# Pull latest Docker image
|
||||||
|
docker compose -f docker-compose.prod.yml pull 2>/dev/null || true
|
||||||
|
|
||||||
|
# Build and restart containers
|
||||||
|
docker compose -f docker-compose.prod.yml build --no-cache
|
||||||
|
docker compose -f docker-compose.prod.yml down
|
||||||
|
docker compose -f docker-compose.prod.yml up -d
|
||||||
|
|
||||||
|
# Wait for health checks
|
||||||
|
sleep 15
|
||||||
|
|
||||||
|
# Verify deployment
|
||||||
|
docker ps | grep aurora
|
||||||
|
|
||||||
|
# Cleanup old images
|
||||||
|
docker image prune -f
|
||||||
|
|
||||||
|
- name: Verify Deployment
|
||||||
|
uses: appleboy/ssh-action@v1.0.3
|
||||||
|
with:
|
||||||
|
host: ${{ secrets.VPS_HOST }}
|
||||||
|
username: ${{ secrets.VPS_USER }}
|
||||||
|
key: ${{ secrets.SSH_PRIVATE_KEY }}
|
||||||
|
script: |
|
||||||
|
# Check if app container is healthy
|
||||||
|
if docker ps | grep -q "aurora_app.*healthy"; then
|
||||||
|
echo "✅ Deployment successful - aurora_app is healthy"
|
||||||
|
exit 0
|
||||||
|
else
|
||||||
|
echo "⚠️ Health check pending, checking container status..."
|
||||||
|
docker ps | grep aurora
|
||||||
|
docker logs aurora_app --tail 20
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -45,4 +45,5 @@ report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
|
|||||||
|
|
||||||
src/db/data
|
src/db/data
|
||||||
src/db/log
|
src/db/log
|
||||||
scratchpad/
|
scratchpad/
|
||||||
|
tickets/
|
||||||
132
AGENTS.md
132
AGENTS.md
@@ -12,7 +12,7 @@ bun --watch bot/index.ts # Run bot with hot reload
|
|||||||
bun --hot web/src/index.ts # Run web dashboard with hot reload
|
bun --hot web/src/index.ts # Run web dashboard with hot reload
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
bun test # Run all tests
|
bun test # Run all tests ( expect some tests to fail when running all at once like this due to the nature of the tests )
|
||||||
bun test path/to/file.test.ts # Run single test file
|
bun test path/to/file.test.ts # Run single test file
|
||||||
bun test --watch # Watch mode
|
bun test --watch # Watch mode
|
||||||
bun test shared/modules/economy # Run tests in directory
|
bun test shared/modules/economy # Run tests in directory
|
||||||
@@ -71,6 +71,7 @@ import { localHelper } from "./helper";
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Available Aliases:**
|
**Available Aliases:**
|
||||||
|
|
||||||
- `@/*` - bot/
|
- `@/*` - bot/
|
||||||
- `@shared/*` - shared/
|
- `@shared/*` - shared/
|
||||||
- `@db/*` - shared/db/
|
- `@db/*` - shared/db/
|
||||||
@@ -80,17 +81,17 @@ import { localHelper } from "./helper";
|
|||||||
|
|
||||||
## Naming Conventions
|
## Naming Conventions
|
||||||
|
|
||||||
| Element | Convention | Example |
|
| Element | Convention | Example |
|
||||||
|---------|------------|---------|
|
| ---------------- | ----------------------- | ---------------------------------------- |
|
||||||
| Files | camelCase or kebab-case | `BotClient.ts`, `economy.service.ts` |
|
| Files | camelCase or kebab-case | `BotClient.ts`, `economy.service.ts` |
|
||||||
| Classes | PascalCase | `CommandHandler`, `UserError` |
|
| Classes | PascalCase | `CommandHandler`, `UserError` |
|
||||||
| Functions | camelCase | `createCommand`, `handleShopInteraction` |
|
| Functions | camelCase | `createCommand`, `handleShopInteraction` |
|
||||||
| Constants | UPPER_SNAKE_CASE | `EVENTS`, `BRANDING` |
|
| Constants | UPPER_SNAKE_CASE | `EVENTS`, `BRANDING` |
|
||||||
| Enums | PascalCase | `TimerType`, `TransactionType` |
|
| Enums | PascalCase | `TimerType`, `TransactionType` |
|
||||||
| Services | camelCase singleton | `economyService`, `userService` |
|
| Services | camelCase singleton | `economyService`, `userService` |
|
||||||
| Types/Interfaces | PascalCase | `Command`, `Event`, `GameConfigType` |
|
| Types/Interfaces | PascalCase | `Command`, `Event`, `GameConfigType` |
|
||||||
| DB tables | snake_case | `users`, `moderation_cases` |
|
| DB tables | snake_case | `users`, `moderation_cases` |
|
||||||
| Custom IDs | snake_case with prefix | `shop_buy_`, `trade_accept_` |
|
| Custom IDs | snake_case with prefix | `shop_buy_`, `trade_accept_` |
|
||||||
|
|
||||||
## Code Patterns
|
## Code Patterns
|
||||||
|
|
||||||
@@ -98,13 +99,13 @@ import { localHelper } from "./helper";
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
export const commandName = createCommand({
|
export const commandName = createCommand({
|
||||||
data: new SlashCommandBuilder()
|
data: new SlashCommandBuilder()
|
||||||
.setName("commandname")
|
.setName("commandname")
|
||||||
.setDescription("Description"),
|
.setDescription("Description"),
|
||||||
execute: async (interaction) => {
|
execute: async (interaction) => {
|
||||||
await interaction.deferReply();
|
await interaction.deferReply();
|
||||||
// Implementation
|
// Implementation
|
||||||
}
|
},
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -112,11 +113,11 @@ export const commandName = createCommand({
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
export const serviceName = {
|
export const serviceName = {
|
||||||
methodName: async (params: ParamType): Promise<ReturnType> => {
|
methodName: async (params: ParamType): Promise<ReturnType> => {
|
||||||
return await withTransaction(async (tx) => {
|
return await withTransaction(async (tx) => {
|
||||||
// Database operations
|
// Database operations
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -146,15 +147,17 @@ throw new SystemError("Database connection failed");
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
try {
|
try {
|
||||||
const result = await service.method();
|
const result = await service.method();
|
||||||
await interaction.editReply({ embeds: [createSuccessEmbed(result)] });
|
await interaction.editReply({ embeds: [createSuccessEmbed(result)] });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error instanceof UserError) {
|
if (error instanceof UserError) {
|
||||||
await interaction.editReply({ embeds: [createErrorEmbed(error.message)] });
|
await interaction.editReply({ embeds: [createErrorEmbed(error.message)] });
|
||||||
} else {
|
} else {
|
||||||
console.error("Unexpected error:", error);
|
console.error("Unexpected error:", error);
|
||||||
await interaction.editReply({ embeds: [createErrorEmbed("An unexpected error occurred.")] });
|
await interaction.editReply({
|
||||||
}
|
embeds: [createErrorEmbed("An unexpected error occurred.")],
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -166,15 +169,18 @@ try {
|
|||||||
import { withTransaction } from "@/lib/db";
|
import { withTransaction } from "@/lib/db";
|
||||||
|
|
||||||
return await withTransaction(async (tx) => {
|
return await withTransaction(async (tx) => {
|
||||||
const user = await tx.query.users.findFirst({
|
const user = await tx.query.users.findFirst({
|
||||||
where: eq(users.id, discordId)
|
where: eq(users.id, discordId),
|
||||||
});
|
});
|
||||||
|
|
||||||
await tx.update(users).set({ coins: newBalance }).where(eq(users.id, discordId));
|
await tx
|
||||||
await tx.insert(transactions).values({ userId: discordId, amount, type });
|
.update(users)
|
||||||
|
.set({ coins: newBalance })
|
||||||
return user;
|
.where(eq(users.id, discordId));
|
||||||
}, existingTx); // Pass existing tx if in nested transaction
|
await tx.insert(transactions).values({ userId: discordId, amount, type });
|
||||||
|
|
||||||
|
return user;
|
||||||
|
}, existingTx); // Pass existing tx if in nested transaction
|
||||||
```
|
```
|
||||||
|
|
||||||
### Schema Notes
|
### Schema Notes
|
||||||
@@ -192,25 +198,25 @@ import { describe, it, expect, mock, beforeEach } from "bun:test";
|
|||||||
|
|
||||||
// Mock modules BEFORE imports
|
// Mock modules BEFORE imports
|
||||||
mock.module("@shared/db/DrizzleClient", () => ({
|
mock.module("@shared/db/DrizzleClient", () => ({
|
||||||
DrizzleClient: { query: mockQuery }
|
DrizzleClient: { query: mockQuery },
|
||||||
}));
|
}));
|
||||||
|
|
||||||
describe("serviceName", () => {
|
describe("serviceName", () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
mockFn.mockClear();
|
mockFn.mockClear();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should handle expected case", async () => {
|
it("should handle expected case", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
mockFn.mockResolvedValue(testData);
|
mockFn.mockResolvedValue(testData);
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const result = await service.method(input);
|
const result = await service.method(input);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(result).toEqual(expected);
|
expect(result).toEqual(expected);
|
||||||
expect(mockFn).toHaveBeenCalledWith(expectedArgs);
|
expect(mockFn).toHaveBeenCalledWith(expectedArgs);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -227,12 +233,12 @@ describe("serviceName", () => {
|
|||||||
|
|
||||||
## Key Files Reference
|
## Key Files Reference
|
||||||
|
|
||||||
| Purpose | File |
|
| Purpose | File |
|
||||||
|---------|------|
|
| ------------- | ---------------------- |
|
||||||
| Bot entry | `bot/index.ts` |
|
| Bot entry | `bot/index.ts` |
|
||||||
| DB schema | `shared/db/schema.ts` |
|
| DB schema | `shared/db/schema.ts` |
|
||||||
| Error classes | `shared/lib/errors.ts` |
|
| Error classes | `shared/lib/errors.ts` |
|
||||||
| Config loader | `shared/lib/config.ts` |
|
| Config loader | `shared/lib/config.ts` |
|
||||||
| Environment | `shared/lib/env.ts` |
|
| Environment | `shared/lib/env.ts` |
|
||||||
| Embed helpers | `bot/lib/embeds.ts` |
|
| Embed helpers | `bot/lib/embeds.ts` |
|
||||||
| Command utils | `shared/lib/utils.ts` |
|
| Command utils | `shared/lib/utils.ts` |
|
||||||
|
|||||||
54
Dockerfile
54
Dockerfile
@@ -1,21 +1,55 @@
|
|||||||
|
# ============================================
|
||||||
|
# Base stage - shared configuration
|
||||||
|
# ============================================
|
||||||
FROM oven/bun:latest AS base
|
FROM oven/bun:latest AS base
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Install system dependencies
|
# Install system dependencies with cleanup in same layer
|
||||||
RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends git && \
|
||||||
|
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||||
|
|
||||||
# Install root project dependencies
|
# ============================================
|
||||||
|
# Dependencies stage - installs all deps
|
||||||
|
# ============================================
|
||||||
|
FROM base AS deps
|
||||||
|
|
||||||
|
# Copy only package files first (better layer caching)
|
||||||
COPY package.json bun.lock ./
|
COPY package.json bun.lock ./
|
||||||
RUN bun install --frozen-lockfile
|
|
||||||
|
|
||||||
# Install web project dependencies
|
|
||||||
COPY web/package.json web/bun.lock ./web/
|
COPY web/package.json web/bun.lock ./web/
|
||||||
RUN cd web && bun install --frozen-lockfile
|
|
||||||
|
|
||||||
# Copy source code
|
# Install all dependencies in one layer
|
||||||
COPY . .
|
RUN bun install --frozen-lockfile && \
|
||||||
|
cd web && bun install --frozen-lockfile
|
||||||
|
|
||||||
# Expose ports (3000 for web dashboard)
|
# ============================================
|
||||||
|
# Development stage - for local dev with volume mounts
|
||||||
|
# ============================================
|
||||||
|
FROM base AS development
|
||||||
|
|
||||||
|
# Copy dependencies from deps stage
|
||||||
|
COPY --from=deps /app/node_modules ./node_modules
|
||||||
|
COPY --from=deps /app/web/node_modules ./web/node_modules
|
||||||
|
|
||||||
|
# Expose ports
|
||||||
|
EXPOSE 3000
|
||||||
|
|
||||||
|
# Default command
|
||||||
|
CMD ["bun", "run", "dev"]
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# Production stage - full app with source code
|
||||||
|
# ============================================
|
||||||
|
FROM base AS production
|
||||||
|
|
||||||
|
# Copy dependencies from deps stage
|
||||||
|
COPY --from=deps /app/node_modules ./node_modules
|
||||||
|
COPY --from=deps /app/web/node_modules ./web/node_modules
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Expose ports
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
|
|
||||||
# Default command
|
# Default command
|
||||||
|
|||||||
57
Dockerfile.prod
Normal file
57
Dockerfile.prod
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# =============================================================================
|
||||||
|
# Stage 1: Dependencies & Build
|
||||||
|
# =============================================================================
|
||||||
|
FROM oven/bun:latest AS builder
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install system dependencies needed for build
|
||||||
|
RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install root project dependencies
|
||||||
|
COPY package.json bun.lock ./
|
||||||
|
RUN bun install --frozen-lockfile
|
||||||
|
|
||||||
|
# Install web project dependencies
|
||||||
|
COPY web/package.json web/bun.lock ./web/
|
||||||
|
RUN cd web && bun install --frozen-lockfile
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build web assets for production
|
||||||
|
RUN cd web && bun run build
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Stage 2: Production Runtime
|
||||||
|
# =============================================================================
|
||||||
|
FROM oven/bun:latest AS production
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Create non-root user for security (bun user already exists with 1000:1000)
|
||||||
|
# No need to create user/group
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Copy only what's needed for production
|
||||||
|
COPY --from=builder --chown=bun:bun /app/node_modules ./node_modules
|
||||||
|
COPY --from=builder --chown=bun:bun /app/web/node_modules ./web/node_modules
|
||||||
|
COPY --from=builder --chown=bun:bun /app/web/dist ./web/dist
|
||||||
|
COPY --from=builder --chown=bun:bun /app/web/src ./web/src
|
||||||
|
COPY --from=builder --chown=bun:bun /app/bot ./bot
|
||||||
|
COPY --from=builder --chown=bun:bun /app/shared ./shared
|
||||||
|
COPY --from=builder --chown=bun:bun /app/package.json .
|
||||||
|
COPY --from=builder --chown=bun:bun /app/drizzle.config.ts .
|
||||||
|
COPY --from=builder --chown=bun:bun /app/tsconfig.json .
|
||||||
|
|
||||||
|
# Switch to non-root user
|
||||||
|
USER bun
|
||||||
|
|
||||||
|
# Expose web dashboard port
|
||||||
|
EXPOSE 3000
|
||||||
|
|
||||||
|
# Health check
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||||
|
CMD bun -e "fetch('http://localhost:3000/api/health').then(r => r.ok ? process.exit(0) : process.exit(1)).catch(() => process.exit(1))"
|
||||||
|
|
||||||
|
# Run in production mode
|
||||||
|
CMD ["bun", "run", "bot/index.ts"]
|
||||||
@@ -1,177 +0,0 @@
|
|||||||
import { createCommand } from "@shared/lib/utils";
|
|
||||||
import { SlashCommandBuilder, PermissionFlagsBits, MessageFlags, ComponentType } from "discord.js";
|
|
||||||
import { UpdateService } from "@shared/modules/admin/update.service";
|
|
||||||
import {
|
|
||||||
getCheckingEmbed,
|
|
||||||
getNoUpdatesEmbed,
|
|
||||||
getUpdatesAvailableMessage,
|
|
||||||
getPreparingEmbed,
|
|
||||||
getUpdatingEmbed,
|
|
||||||
getCancelledEmbed,
|
|
||||||
getTimeoutEmbed,
|
|
||||||
getErrorEmbed,
|
|
||||||
getRollbackSuccessEmbed,
|
|
||||||
getRollbackFailedEmbed
|
|
||||||
} from "@/modules/admin/update.view";
|
|
||||||
|
|
||||||
export const update = createCommand({
|
|
||||||
data: new SlashCommandBuilder()
|
|
||||||
.setName("update")
|
|
||||||
.setDescription("Check for updates and restart the bot")
|
|
||||||
.addSubcommand(sub =>
|
|
||||||
sub.setName("check")
|
|
||||||
.setDescription("Check for and apply available updates")
|
|
||||||
.addBooleanOption(option =>
|
|
||||||
option.setName("force")
|
|
||||||
.setDescription("Force update even if no changes detected")
|
|
||||||
.setRequired(false)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.addSubcommand(sub =>
|
|
||||||
sub.setName("rollback")
|
|
||||||
.setDescription("Rollback to the previous version")
|
|
||||||
)
|
|
||||||
.setDefaultMemberPermissions(PermissionFlagsBits.Administrator),
|
|
||||||
|
|
||||||
execute: async (interaction) => {
|
|
||||||
const subcommand = interaction.options.getSubcommand();
|
|
||||||
|
|
||||||
if (subcommand === "rollback") {
|
|
||||||
await handleRollback(interaction);
|
|
||||||
} else {
|
|
||||||
await handleUpdate(interaction);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
async function handleUpdate(interaction: any) {
|
|
||||||
await interaction.deferReply({ flags: MessageFlags.Ephemeral });
|
|
||||||
const force = interaction.options.getBoolean("force") || false;
|
|
||||||
|
|
||||||
try {
|
|
||||||
// 1. Check for updates
|
|
||||||
await interaction.editReply({ embeds: [getCheckingEmbed()] });
|
|
||||||
const updateInfo = await UpdateService.checkForUpdates();
|
|
||||||
|
|
||||||
if (!updateInfo.hasUpdates && !force) {
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getNoUpdatesEmbed(updateInfo.currentCommit)]
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Analyze requirements
|
|
||||||
const requirements = await UpdateService.checkUpdateRequirements(updateInfo.branch);
|
|
||||||
const categories = UpdateService.categorizeChanges(requirements.changedFiles);
|
|
||||||
|
|
||||||
// 3. Show confirmation with details
|
|
||||||
const { embeds, components } = getUpdatesAvailableMessage(
|
|
||||||
updateInfo,
|
|
||||||
requirements,
|
|
||||||
categories,
|
|
||||||
force
|
|
||||||
);
|
|
||||||
const response = await interaction.editReply({ embeds, components });
|
|
||||||
|
|
||||||
// 4. Wait for confirmation
|
|
||||||
try {
|
|
||||||
const confirmation = await response.awaitMessageComponent({
|
|
||||||
filter: (i: any) => i.user.id === interaction.user.id,
|
|
||||||
componentType: ComponentType.Button,
|
|
||||||
time: 30000
|
|
||||||
});
|
|
||||||
|
|
||||||
if (confirmation.customId === "confirm_update") {
|
|
||||||
await confirmation.update({
|
|
||||||
embeds: [getPreparingEmbed()],
|
|
||||||
components: []
|
|
||||||
});
|
|
||||||
|
|
||||||
// 5. Save rollback point
|
|
||||||
const previousCommit = await UpdateService.saveRollbackPoint();
|
|
||||||
|
|
||||||
// 6. Prepare restart context
|
|
||||||
await UpdateService.prepareRestartContext({
|
|
||||||
channelId: interaction.channelId,
|
|
||||||
userId: interaction.user.id,
|
|
||||||
timestamp: Date.now(),
|
|
||||||
runMigrations: requirements.needsMigrations,
|
|
||||||
installDependencies: requirements.needsRootInstall || requirements.needsWebInstall,
|
|
||||||
buildWebAssets: requirements.needsWebBuild,
|
|
||||||
previousCommit: previousCommit.substring(0, 7),
|
|
||||||
newCommit: updateInfo.latestCommit
|
|
||||||
});
|
|
||||||
|
|
||||||
// 7. Show updating status
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getUpdatingEmbed(requirements)]
|
|
||||||
});
|
|
||||||
|
|
||||||
// 8. Perform update
|
|
||||||
await UpdateService.performUpdate(updateInfo.branch);
|
|
||||||
|
|
||||||
// 9. Trigger restart
|
|
||||||
await UpdateService.triggerRestart();
|
|
||||||
|
|
||||||
} else {
|
|
||||||
await confirmation.update({
|
|
||||||
embeds: [getCancelledEmbed()],
|
|
||||||
components: []
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (e) {
|
|
||||||
if (e instanceof Error && e.message.includes("time")) {
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getTimeoutEmbed()],
|
|
||||||
components: []
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Update failed:", error);
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getErrorEmbed(error)],
|
|
||||||
components: []
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleRollback(interaction: any) {
|
|
||||||
await interaction.deferReply({ flags: MessageFlags.Ephemeral });
|
|
||||||
|
|
||||||
try {
|
|
||||||
const hasRollback = await UpdateService.hasRollbackPoint();
|
|
||||||
|
|
||||||
if (!hasRollback) {
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getRollbackFailedEmbed("No rollback point available. Rollback is only possible after a recent update.")]
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await UpdateService.rollback();
|
|
||||||
|
|
||||||
if (result.success) {
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getRollbackSuccessEmbed(result.message.split(" ").pop() || "unknown")]
|
|
||||||
});
|
|
||||||
|
|
||||||
// Restart after rollback
|
|
||||||
setTimeout(() => UpdateService.triggerRestart(), 1000);
|
|
||||||
} else {
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getRollbackFailedEmbed(result.message)]
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Rollback failed:", error);
|
|
||||||
await interaction.editReply({
|
|
||||||
embeds: [getErrorEmbed(error)]
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -9,9 +9,7 @@ const event: Event<Events.ClientReady> = {
|
|||||||
console.log(`Ready! Logged in as ${c.user.tag}`);
|
console.log(`Ready! Logged in as ${c.user.tag}`);
|
||||||
schedulerService.start();
|
schedulerService.start();
|
||||||
|
|
||||||
// Handle post-update tasks
|
|
||||||
const { UpdateService } = await import("@shared/modules/admin/update.service");
|
|
||||||
await UpdateService.handlePostRestart(c);
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -20,7 +20,8 @@ mock.module("discord.js", () => ({
|
|||||||
Routes: {
|
Routes: {
|
||||||
applicationGuildCommands: () => 'guild_route',
|
applicationGuildCommands: () => 'guild_route',
|
||||||
applicationCommands: () => 'global_route'
|
applicationCommands: () => 'global_route'
|
||||||
}
|
},
|
||||||
|
MessageFlags: {}
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock loaders to avoid filesystem access during client init
|
// Mock loaders to avoid filesystem access during client init
|
||||||
|
|||||||
@@ -20,6 +20,9 @@ mock.module("./BotClient", () => ({
|
|||||||
commands: {
|
commands: {
|
||||||
size: 20,
|
size: 20,
|
||||||
},
|
},
|
||||||
|
knownCommands: {
|
||||||
|
size: 20,
|
||||||
|
},
|
||||||
lastCommandTimestamp: 1641481200000,
|
lastCommandTimestamp: 1641481200000,
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|||||||
@@ -1,35 +0,0 @@
|
|||||||
|
|
||||||
export interface RestartContext {
|
|
||||||
channelId: string;
|
|
||||||
userId: string;
|
|
||||||
timestamp: number;
|
|
||||||
runMigrations: boolean;
|
|
||||||
installDependencies: boolean;
|
|
||||||
buildWebAssets: boolean;
|
|
||||||
previousCommit: string;
|
|
||||||
newCommit: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface UpdateCheckResult {
|
|
||||||
needsRootInstall: boolean;
|
|
||||||
needsWebInstall: boolean;
|
|
||||||
needsWebBuild: boolean;
|
|
||||||
needsMigrations: boolean;
|
|
||||||
changedFiles: string[];
|
|
||||||
error?: Error;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface UpdateInfo {
|
|
||||||
hasUpdates: boolean;
|
|
||||||
branch: string;
|
|
||||||
currentCommit: string;
|
|
||||||
latestCommit: string;
|
|
||||||
commitCount: number;
|
|
||||||
commits: CommitInfo[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CommitInfo {
|
|
||||||
hash: string;
|
|
||||||
message: string;
|
|
||||||
author: string;
|
|
||||||
}
|
|
||||||
@@ -1,356 +0,0 @@
|
|||||||
import { ActionRowBuilder, ButtonBuilder, ButtonStyle, EmbedBuilder } from "discord.js";
|
|
||||||
import { createInfoEmbed, createSuccessEmbed, createWarningEmbed, createErrorEmbed } from "@lib/embeds";
|
|
||||||
import type { UpdateInfo, UpdateCheckResult } from "./update.types";
|
|
||||||
|
|
||||||
// Constants for UI
|
|
||||||
const LOG_TRUNCATE_LENGTH = 800;
|
|
||||||
const OUTPUT_TRUNCATE_LENGTH = 400;
|
|
||||||
|
|
||||||
function truncate(text: string, maxLength: number): string {
|
|
||||||
if (!text) return "";
|
|
||||||
return text.length > maxLength ? `${text.substring(0, maxLength)}...` : text;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============ Pre-Update Embeds ============
|
|
||||||
|
|
||||||
export function getCheckingEmbed() {
|
|
||||||
return createInfoEmbed("🔍 Fetching latest changes from remote...", "Checking for Updates");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getNoUpdatesEmbed(currentCommit: string) {
|
|
||||||
return createSuccessEmbed(
|
|
||||||
`You're running the latest version.\n\n**Current:** \`${currentCommit}\``,
|
|
||||||
"✅ Already Up to Date"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getUpdatesAvailableMessage(
|
|
||||||
updateInfo: UpdateInfo,
|
|
||||||
requirements: UpdateCheckResult,
|
|
||||||
changeCategories: Record<string, number>,
|
|
||||||
force: boolean
|
|
||||||
) {
|
|
||||||
const { branch, currentCommit, latestCommit, commitCount, commits } = updateInfo;
|
|
||||||
const { needsRootInstall, needsWebInstall, needsWebBuild, needsMigrations } = requirements;
|
|
||||||
|
|
||||||
// Build commit list (max 5)
|
|
||||||
const commitList = commits
|
|
||||||
.slice(0, 5)
|
|
||||||
.map(c => `\`${c.hash}\` ${truncate(c.message, 50)}`)
|
|
||||||
.join("\n");
|
|
||||||
|
|
||||||
const moreCommits = commitCount > 5 ? `\n*...and ${commitCount - 5} more*` : "";
|
|
||||||
|
|
||||||
// Build change categories
|
|
||||||
const categoryList = Object.entries(changeCategories)
|
|
||||||
.map(([cat, count]) => `• ${cat}: ${count} file${count > 1 ? "s" : ""}`)
|
|
||||||
.join("\n");
|
|
||||||
|
|
||||||
// Build requirements list
|
|
||||||
const reqs: string[] = [];
|
|
||||||
if (needsRootInstall) reqs.push("📦 Install root dependencies");
|
|
||||||
if (needsWebInstall) reqs.push("🌐 Install web dependencies");
|
|
||||||
if (needsWebBuild) reqs.push("🏗️ Build web dashboard");
|
|
||||||
if (needsMigrations) reqs.push("🗃️ Run database migrations");
|
|
||||||
if (reqs.length === 0) reqs.push("⚡ Quick update (no extra steps)");
|
|
||||||
|
|
||||||
const embed = new EmbedBuilder()
|
|
||||||
.setTitle("📥 Updates Available")
|
|
||||||
.setColor(force ? 0xFF6B6B : 0x5865F2)
|
|
||||||
.addFields(
|
|
||||||
{
|
|
||||||
name: "Version",
|
|
||||||
value: `\`${currentCommit}\` → \`${latestCommit}\``,
|
|
||||||
inline: true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Branch",
|
|
||||||
value: `\`${branch}\``,
|
|
||||||
inline: true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Commits",
|
|
||||||
value: `${commitCount} new commit${commitCount > 1 ? "s" : ""}`,
|
|
||||||
inline: true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Recent Changes",
|
|
||||||
value: commitList + moreCommits || "No commits",
|
|
||||||
inline: false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Files Changed",
|
|
||||||
value: categoryList || "Unknown",
|
|
||||||
inline: true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Update Actions",
|
|
||||||
value: reqs.join("\n"),
|
|
||||||
inline: true
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.setFooter({ text: force ? "⚠️ Force mode enabled" : "This will restart the bot" })
|
|
||||||
.setTimestamp();
|
|
||||||
|
|
||||||
const confirmButton = new ButtonBuilder()
|
|
||||||
.setCustomId("confirm_update")
|
|
||||||
.setLabel(force ? "Force Update" : "Update Now")
|
|
||||||
.setEmoji(force ? "⚠️" : "🚀")
|
|
||||||
.setStyle(force ? ButtonStyle.Danger : ButtonStyle.Success);
|
|
||||||
|
|
||||||
const cancelButton = new ButtonBuilder()
|
|
||||||
.setCustomId("cancel_update")
|
|
||||||
.setLabel("Cancel")
|
|
||||||
.setStyle(ButtonStyle.Secondary);
|
|
||||||
|
|
||||||
const row = new ActionRowBuilder<ButtonBuilder>().addComponents(confirmButton, cancelButton);
|
|
||||||
|
|
||||||
return { embeds: [embed], components: [row] };
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============ Update Progress Embeds ============
|
|
||||||
|
|
||||||
export function getPreparingEmbed() {
|
|
||||||
return createInfoEmbed(
|
|
||||||
"🔒 Saving rollback point...\n📥 Preparing to download updates...",
|
|
||||||
"⏳ Preparing Update"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getUpdatingEmbed(requirements: UpdateCheckResult) {
|
|
||||||
const steps: string[] = ["✅ Rollback point saved"];
|
|
||||||
|
|
||||||
steps.push("📥 Downloading updates...");
|
|
||||||
|
|
||||||
if (requirements.needsRootInstall || requirements.needsWebInstall) {
|
|
||||||
steps.push("📦 Dependencies will be installed after restart");
|
|
||||||
}
|
|
||||||
if (requirements.needsWebBuild) {
|
|
||||||
steps.push("🏗️ Web dashboard will be rebuilt after restart");
|
|
||||||
}
|
|
||||||
if (requirements.needsMigrations) {
|
|
||||||
steps.push("🗃️ Migrations will run after restart");
|
|
||||||
}
|
|
||||||
|
|
||||||
steps.push("\n🔄 **Restarting now...**");
|
|
||||||
|
|
||||||
return createWarningEmbed(steps.join("\n"), "🚀 Updating");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getCancelledEmbed() {
|
|
||||||
return createInfoEmbed("Update cancelled. No changes were made.", "❌ Cancelled");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getTimeoutEmbed() {
|
|
||||||
return createWarningEmbed(
|
|
||||||
"No response received within 30 seconds.\nRun `/update` again when ready.",
|
|
||||||
"⏰ Timed Out"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getErrorEmbed(error: unknown) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error);
|
|
||||||
return createErrorEmbed(
|
|
||||||
`The update could not be completed:\n\`\`\`\n${truncate(message, 500)}\n\`\`\``,
|
|
||||||
"❌ Update Failed"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============ Post-Restart Embeds ============
|
|
||||||
|
|
||||||
export interface PostRestartResult {
|
|
||||||
installSuccess: boolean;
|
|
||||||
installOutput: string;
|
|
||||||
webBuildSuccess: boolean;
|
|
||||||
webBuildOutput: string;
|
|
||||||
migrationSuccess: boolean;
|
|
||||||
migrationOutput: string;
|
|
||||||
ranInstall: boolean;
|
|
||||||
ranWebBuild: boolean;
|
|
||||||
ranMigrations: boolean;
|
|
||||||
previousCommit?: string;
|
|
||||||
newCommit?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getPostRestartEmbed(result: PostRestartResult, hasRollback: boolean) {
|
|
||||||
const isSuccess = result.installSuccess && result.webBuildSuccess && result.migrationSuccess;
|
|
||||||
|
|
||||||
const embed = new EmbedBuilder()
|
|
||||||
.setTitle(isSuccess ? "✅ Update Complete" : "⚠️ Update Completed with Issues")
|
|
||||||
.setColor(isSuccess ? 0x57F287 : 0xFEE75C)
|
|
||||||
.setTimestamp();
|
|
||||||
|
|
||||||
// Version info
|
|
||||||
if (result.previousCommit && result.newCommit) {
|
|
||||||
embed.addFields({
|
|
||||||
name: "Version",
|
|
||||||
value: `\`${result.previousCommit}\` → \`${result.newCommit}\``,
|
|
||||||
inline: false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Results summary
|
|
||||||
const results: string[] = [];
|
|
||||||
|
|
||||||
if (result.ranInstall) {
|
|
||||||
results.push(result.installSuccess
|
|
||||||
? "✅ Dependencies installed"
|
|
||||||
: "❌ Dependency installation failed"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.ranWebBuild) {
|
|
||||||
results.push(result.webBuildSuccess
|
|
||||||
? "✅ Web dashboard built"
|
|
||||||
: "❌ Web dashboard build failed"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.ranMigrations) {
|
|
||||||
results.push(result.migrationSuccess
|
|
||||||
? "✅ Migrations applied"
|
|
||||||
: "❌ Migration failed"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (results.length > 0) {
|
|
||||||
embed.addFields({
|
|
||||||
name: "Actions Performed",
|
|
||||||
value: results.join("\n"),
|
|
||||||
inline: false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Output details (collapsed if too long)
|
|
||||||
if (result.installOutput && !result.installSuccess) {
|
|
||||||
embed.addFields({
|
|
||||||
name: "Install Output",
|
|
||||||
value: `\`\`\`\n${truncate(result.installOutput, OUTPUT_TRUNCATE_LENGTH)}\n\`\`\``,
|
|
||||||
inline: false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.webBuildOutput && !result.webBuildSuccess) {
|
|
||||||
embed.addFields({
|
|
||||||
name: "Web Build Output",
|
|
||||||
value: `\`\`\`\n${truncate(result.webBuildOutput, OUTPUT_TRUNCATE_LENGTH)}\n\`\`\``,
|
|
||||||
inline: false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.migrationOutput && !result.migrationSuccess) {
|
|
||||||
embed.addFields({
|
|
||||||
name: "Migration Output",
|
|
||||||
value: `\`\`\`\n${truncate(result.migrationOutput, OUTPUT_TRUNCATE_LENGTH)}\n\`\`\``,
|
|
||||||
inline: false
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Footer with rollback hint
|
|
||||||
if (!isSuccess && hasRollback) {
|
|
||||||
embed.setFooter({ text: "💡 Use /update rollback to revert if needed" });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build components
|
|
||||||
const components: ActionRowBuilder<ButtonBuilder>[] = [];
|
|
||||||
|
|
||||||
if (!isSuccess && hasRollback) {
|
|
||||||
const rollbackButton = new ButtonBuilder()
|
|
||||||
.setCustomId("rollback_update")
|
|
||||||
.setLabel("Rollback")
|
|
||||||
.setEmoji("↩️")
|
|
||||||
.setStyle(ButtonStyle.Danger);
|
|
||||||
|
|
||||||
components.push(new ActionRowBuilder<ButtonBuilder>().addComponents(rollbackButton));
|
|
||||||
}
|
|
||||||
|
|
||||||
return { embeds: [embed], components };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getInstallingDependenciesEmbed() {
|
|
||||||
return createInfoEmbed(
|
|
||||||
"📦 Installing dependencies for root and web projects...\nThis may take a moment.",
|
|
||||||
"⏳ Installing Dependencies"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getRunningMigrationsEmbed() {
|
|
||||||
return createInfoEmbed(
|
|
||||||
"🗃️ Applying database migrations...",
|
|
||||||
"⏳ Running Migrations"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getBuildingWebEmbed() {
|
|
||||||
return createInfoEmbed(
|
|
||||||
"🌐 Building web dashboard assets...\nThis may take a moment.",
|
|
||||||
"⏳ Building Web Dashboard"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface PostRestartProgress {
|
|
||||||
installDeps: boolean;
|
|
||||||
buildWeb: boolean;
|
|
||||||
runMigrations: boolean;
|
|
||||||
currentStep: "starting" | "install" | "build" | "migrate" | "done";
|
|
||||||
installDone?: boolean;
|
|
||||||
buildDone?: boolean;
|
|
||||||
migrateDone?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getPostRestartProgressEmbed(progress: PostRestartProgress) {
|
|
||||||
const steps: string[] = [];
|
|
||||||
|
|
||||||
// Installation step
|
|
||||||
if (progress.installDeps) {
|
|
||||||
if (progress.currentStep === "install") {
|
|
||||||
steps.push("⏳ Installing dependencies...");
|
|
||||||
} else if (progress.installDone) {
|
|
||||||
steps.push("✅ Dependencies installed");
|
|
||||||
} else {
|
|
||||||
steps.push("⬚ Install dependencies");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Web build step
|
|
||||||
if (progress.buildWeb) {
|
|
||||||
if (progress.currentStep === "build") {
|
|
||||||
steps.push("⏳ Building web dashboard...");
|
|
||||||
} else if (progress.buildDone) {
|
|
||||||
steps.push("✅ Web dashboard built");
|
|
||||||
} else {
|
|
||||||
steps.push("⬚ Build web dashboard");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Migrations step
|
|
||||||
if (progress.runMigrations) {
|
|
||||||
if (progress.currentStep === "migrate") {
|
|
||||||
steps.push("⏳ Running migrations...");
|
|
||||||
} else if (progress.migrateDone) {
|
|
||||||
steps.push("✅ Migrations applied");
|
|
||||||
} else {
|
|
||||||
steps.push("⬚ Run migrations");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (steps.length === 0) {
|
|
||||||
steps.push("⚡ Quick restart (no extra steps needed)");
|
|
||||||
}
|
|
||||||
|
|
||||||
return createInfoEmbed(steps.join("\n"), "🔄 Post-Update Tasks");
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getRollbackSuccessEmbed(commit: string) {
|
|
||||||
return createSuccessEmbed(
|
|
||||||
`Successfully rolled back to commit \`${commit}\`.\nThe bot will restart now.`,
|
|
||||||
"↩️ Rollback Complete"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getRollbackFailedEmbed(error: string) {
|
|
||||||
return createErrorEmbed(
|
|
||||||
`Could not rollback:\n\`\`\`\n${error}\n\`\`\``,
|
|
||||||
"❌ Rollback Failed"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
34
bun.lock
34
bun.lock
@@ -92,27 +92,29 @@
|
|||||||
|
|
||||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="],
|
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="],
|
||||||
|
|
||||||
"@napi-rs/canvas": ["@napi-rs/canvas@0.1.84", "", { "optionalDependencies": { "@napi-rs/canvas-android-arm64": "0.1.84", "@napi-rs/canvas-darwin-arm64": "0.1.84", "@napi-rs/canvas-darwin-x64": "0.1.84", "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.84", "@napi-rs/canvas-linux-arm64-gnu": "0.1.84", "@napi-rs/canvas-linux-arm64-musl": "0.1.84", "@napi-rs/canvas-linux-riscv64-gnu": "0.1.84", "@napi-rs/canvas-linux-x64-gnu": "0.1.84", "@napi-rs/canvas-linux-x64-musl": "0.1.84", "@napi-rs/canvas-win32-x64-msvc": "0.1.84" } }, "sha512-88FTNFs4uuiFKP0tUrPsEXhpe9dg7za9ILZJE08pGdUveMIDeana1zwfVkqRHJDPJFAmGY3dXmJ99dzsy57YnA=="],
|
"@napi-rs/canvas": ["@napi-rs/canvas@0.1.89", "", { "optionalDependencies": { "@napi-rs/canvas-android-arm64": "0.1.89", "@napi-rs/canvas-darwin-arm64": "0.1.89", "@napi-rs/canvas-darwin-x64": "0.1.89", "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.89", "@napi-rs/canvas-linux-arm64-gnu": "0.1.89", "@napi-rs/canvas-linux-arm64-musl": "0.1.89", "@napi-rs/canvas-linux-riscv64-gnu": "0.1.89", "@napi-rs/canvas-linux-x64-gnu": "0.1.89", "@napi-rs/canvas-linux-x64-musl": "0.1.89", "@napi-rs/canvas-win32-arm64-msvc": "0.1.89", "@napi-rs/canvas-win32-x64-msvc": "0.1.89" } }, "sha512-7GjmkMirJHejeALCqUnZY3QwID7bbumOiLrqq2LKgxrdjdmxWQBTc6rcASa2u8wuWrH7qo4/4n/VNrOwCoKlKg=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-android-arm64": ["@napi-rs/canvas-android-arm64@0.1.84", "", { "os": "android", "cpu": "arm64" }, "sha512-pdvuqvj3qtwVryqgpAGornJLV6Ezpk39V6wT4JCnRVGy8I3Tk1au8qOalFGrx/r0Ig87hWslysPpHBxVpBMIww=="],
|
"@napi-rs/canvas-android-arm64": ["@napi-rs/canvas-android-arm64@0.1.89", "", { "os": "android", "cpu": "arm64" }, "sha512-CXxQTXsjtQqKGENS8Ejv9pZOFJhOPIl2goenS+aU8dY4DygvkyagDhy/I07D1YLqrDtPvLEX5zZHt8qUdnuIpQ=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-darwin-arm64": ["@napi-rs/canvas-darwin-arm64@0.1.84", "", { "os": "darwin", "cpu": "arm64" }, "sha512-A8IND3Hnv0R6abc6qCcCaOCujTLMmGxtucMTZ5vbQUrEN/scxi378MyTLtyWg+MRr6bwQJ6v/orqMS9datIcww=="],
|
"@napi-rs/canvas-darwin-arm64": ["@napi-rs/canvas-darwin-arm64@0.1.89", "", { "os": "darwin", "cpu": "arm64" }, "sha512-k29cR/Zl20WLYM7M8YePevRu2VQRaKcRedYr1V/8FFHkyIQ8kShEV+MPoPGi+znvmd17Eqjy2Pk2F2kpM2umVg=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-darwin-x64": ["@napi-rs/canvas-darwin-x64@0.1.84", "", { "os": "darwin", "cpu": "x64" }, "sha512-AUW45lJhYWwnA74LaNeqhvqYKK/2hNnBBBl03KRdqeCD4tKneUSrxUqIv8d22CBweOvrAASyKN3W87WO2zEr/A=="],
|
"@napi-rs/canvas-darwin-x64": ["@napi-rs/canvas-darwin-x64@0.1.89", "", { "os": "darwin", "cpu": "x64" }, "sha512-iUragqhBrA5FqU13pkhYBDbUD1WEAIlT8R2+fj6xHICY2nemzwMUI8OENDhRh7zuL06YDcRwENbjAVxOmaX9jg=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-linux-arm-gnueabihf": ["@napi-rs/canvas-linux-arm-gnueabihf@0.1.84", "", { "os": "linux", "cpu": "arm" }, "sha512-8zs5ZqOrdgs4FioTxSBrkl/wHZB56bJNBqaIsfPL4ZkEQCinOkrFF7xIcXiHiKp93J3wUtbIzeVrhTIaWwqk+A=="],
|
"@napi-rs/canvas-linux-arm-gnueabihf": ["@napi-rs/canvas-linux-arm-gnueabihf@0.1.89", "", { "os": "linux", "cpu": "arm" }, "sha512-y3SM9sfDWasY58ftoaI09YBFm35Ig8tosZqgahLJ2WGqawCusGNPV9P0/4PsrLOCZqGg629WxexQMY25n7zcvA=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-linux-arm64-gnu": ["@napi-rs/canvas-linux-arm64-gnu@0.1.84", "", { "os": "linux", "cpu": "arm64" }, "sha512-i204vtowOglJUpbAFWU5mqsJgH0lVpNk/Ml4mQtB4Lndd86oF+Otr6Mr5KQnZHqYGhlSIKiU2SYnUbhO28zGQA=="],
|
"@napi-rs/canvas-linux-arm64-gnu": ["@napi-rs/canvas-linux-arm64-gnu@0.1.89", "", { "os": "linux", "cpu": "arm64" }, "sha512-NEoF9y8xq5fX8HG8aZunBom1ILdTwt7ayBzSBIwrmitk7snj4W6Fz/yN/ZOmlM1iyzHDNX5Xn0n+VgWCF8BEdA=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-linux-arm64-musl": ["@napi-rs/canvas-linux-arm64-musl@0.1.84", "", { "os": "linux", "cpu": "arm64" }, "sha512-VyZq0EEw+OILnWk7G3ZgLLPaz1ERaPP++jLjeyLMbFOF+Tr4zHzWKiKDsEV/cT7btLPZbVoR3VX+T9/QubnURQ=="],
|
"@napi-rs/canvas-linux-arm64-musl": ["@napi-rs/canvas-linux-arm64-musl@0.1.89", "", { "os": "linux", "cpu": "arm64" }, "sha512-UQQkIEzV12/l60j1ziMjZ+mtodICNUbrd205uAhbyTw0t60CrC/EsKb5/aJWGq1wM0agvcgZV72JJCKfLS6+4w=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-linux-riscv64-gnu": ["@napi-rs/canvas-linux-riscv64-gnu@0.1.84", "", { "os": "linux", "cpu": "none" }, "sha512-PSMTh8DiThvLRsbtc/a065I/ceZk17EXAATv9uNvHgkgo7wdEfTh2C3aveNkBMGByVO3tvnvD5v/YFtZL07cIg=="],
|
"@napi-rs/canvas-linux-riscv64-gnu": ["@napi-rs/canvas-linux-riscv64-gnu@0.1.89", "", { "os": "linux", "cpu": "none" }, "sha512-1/VmEoFaIO6ONeeEMGoWF17wOYZOl5hxDC1ios2Bkz/oQjbJJ8DY/X22vWTmvuUKWWhBVlo63pxLGZbjJU/heA=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-linux-x64-gnu": ["@napi-rs/canvas-linux-x64-gnu@0.1.84", "", { "os": "linux", "cpu": "x64" }, "sha512-N1GY3noO1oqgEo3rYQIwY44kfM11vA0lDbN0orTOHfCSUZTUyiYCY0nZ197QMahZBm1aR/vYgsWpV74MMMDuNA=="],
|
"@napi-rs/canvas-linux-x64-gnu": ["@napi-rs/canvas-linux-x64-gnu@0.1.89", "", { "os": "linux", "cpu": "x64" }, "sha512-ebLuqkCuaPIkKgKH9q4+pqWi1tkPOfiTk5PM1LKR1tB9iO9sFNVSIgwEp+SJreTSbA2DK5rW8lQXiN78SjtcvA=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-linux-x64-musl": ["@napi-rs/canvas-linux-x64-musl@0.1.84", "", { "os": "linux", "cpu": "x64" }, "sha512-vUZmua6ADqTWyHyei81aXIt9wp0yjeNwTH0KdhdeoBb6azHmFR8uKTukZMXfLCC3bnsW0t4lW7K78KNMknmtjg=="],
|
"@napi-rs/canvas-linux-x64-musl": ["@napi-rs/canvas-linux-x64-musl@0.1.89", "", { "os": "linux", "cpu": "x64" }, "sha512-w+5qxHzplvA4BkHhCaizNMLLXiI+CfP84YhpHm/PqMub4u8J0uOAv+aaGv40rYEYra5hHRWr9LUd6cfW32o9/A=="],
|
||||||
|
|
||||||
"@napi-rs/canvas-win32-x64-msvc": ["@napi-rs/canvas-win32-x64-msvc@0.1.84", "", { "os": "win32", "cpu": "x64" }, "sha512-YSs8ncurc1xzegUMNnQUTYrdrAuaXdPMOa+iYYyAxydOtg0ppV386hyYMsy00Yip1NlTgLCseRG4sHSnjQx6og=="],
|
"@napi-rs/canvas-win32-arm64-msvc": ["@napi-rs/canvas-win32-arm64-msvc@0.1.89", "", { "os": "win32", "cpu": "arm64" }, "sha512-DmyXa5lJHcjOsDC78BM3bnEECqbK3xASVMrKfvtT/7S7Z8NGQOugvu+L7b41V6cexCd34mBWgMOsjoEBceeB1Q=="],
|
||||||
|
|
||||||
|
"@napi-rs/canvas-win32-x64-msvc": ["@napi-rs/canvas-win32-x64-msvc@0.1.89", "", { "os": "win32", "cpu": "x64" }, "sha512-WMej0LZrIqIncQcx0JHaMXlnAG7sncwJh7obs/GBgp0xF9qABjwoRwIooMWCZkSansapKGNUHhamY6qEnFN7gA=="],
|
||||||
|
|
||||||
"@sapphire/async-queue": ["@sapphire/async-queue@1.5.5", "", {}, "sha512-cvGzxbba6sav2zZkH8GPf2oGk9yYoD5qrNWdu9fRehifgnFZJMV+nuy2nON2roRO4yQQ+v7MK/Pktl/HgfsUXg=="],
|
"@sapphire/async-queue": ["@sapphire/async-queue@1.5.5", "", {}, "sha512-cvGzxbba6sav2zZkH8GPf2oGk9yYoD5qrNWdu9fRehifgnFZJMV+nuy2nON2roRO4yQQ+v7MK/Pktl/HgfsUXg=="],
|
||||||
|
|
||||||
@@ -120,7 +122,7 @@
|
|||||||
|
|
||||||
"@sapphire/snowflake": ["@sapphire/snowflake@3.5.3", "", {}, "sha512-jjmJywLAFoWeBi1W7994zZyiNWPIiqRRNAmSERxyg93xRGzNYvGjlZ0gR6x0F4gPRi2+0O6S71kOZYyr3cxaIQ=="],
|
"@sapphire/snowflake": ["@sapphire/snowflake@3.5.3", "", {}, "sha512-jjmJywLAFoWeBi1W7994zZyiNWPIiqRRNAmSERxyg93xRGzNYvGjlZ0gR6x0F4gPRi2+0O6S71kOZYyr3cxaIQ=="],
|
||||||
|
|
||||||
"@types/bun": ["@types/bun@1.3.3", "", { "dependencies": { "bun-types": "1.3.3" } }, "sha512-ogrKbJ2X5N0kWLLFKeytG0eHDleBYtngtlbu9cyBKFtNL3cnpDZkNdQj8flVf6WTZUX5ulI9AY1oa7ljhSrp+g=="],
|
"@types/bun": ["@types/bun@1.3.8", "", { "dependencies": { "bun-types": "1.3.8" } }, "sha512-3LvWJ2q5GerAXYxO2mffLTqOzEu5qnhEAlh48Vnu8WQfnmSwbgagjGZV6BoHKJztENYEDn6QmVd949W4uESRJA=="],
|
||||||
|
|
||||||
"@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="],
|
"@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="],
|
||||||
|
|
||||||
@@ -130,7 +132,7 @@
|
|||||||
|
|
||||||
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
|
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
|
||||||
|
|
||||||
"bun-types": ["bun-types@1.3.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ=="],
|
"bun-types": ["bun-types@1.3.8", "", { "dependencies": { "@types/node": "*" } }, "sha512-fL99nxdOWvV4LqjmC+8Q9kW3M4QTtTR1eePs94v5ctGqU8OeceWrSUaRw3JYb7tU3FkMIAjkueehrHPPPGKi5Q=="],
|
||||||
|
|
||||||
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
||||||
|
|
||||||
@@ -140,7 +142,7 @@
|
|||||||
|
|
||||||
"dotenv": ["dotenv@17.2.3", "", {}, "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="],
|
"dotenv": ["dotenv@17.2.3", "", {}, "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w=="],
|
||||||
|
|
||||||
"drizzle-kit": ["drizzle-kit@0.31.7", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-hOzRGSdyKIU4FcTSFYGKdXEjFsncVwHZ43gY3WU5Bz9j5Iadp6Rh6hxLSQ1IWXpKLBKt/d5y1cpSPcV+FcoQ1A=="],
|
"drizzle-kit": ["drizzle-kit@0.31.8", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-O9EC/miwdnRDY10qRxM8P3Pg8hXe3LyU4ZipReKOgTwn4OqANmftj8XJz1UPUAS6NMHf0E2htjsbQujUTkncCg=="],
|
||||||
|
|
||||||
"drizzle-orm": ["drizzle-orm@0.44.7", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-quIpnYznjU9lHshEOAYLoZ9s3jweleHlZIAWR/jX9gAWNg/JhQ1wj0KGRf7/Zm+obRrYd9GjPVJg790QY9N5AQ=="],
|
"drizzle-orm": ["drizzle-orm@0.44.7", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-quIpnYznjU9lHshEOAYLoZ9s3jweleHlZIAWR/jX9gAWNg/JhQ1wj0KGRf7/Zm+obRrYd9GjPVJg790QY9N5AQ=="],
|
||||||
|
|
||||||
@@ -160,7 +162,7 @@
|
|||||||
|
|
||||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||||
|
|
||||||
"postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="],
|
"postgres": ["postgres@3.4.8", "", {}, "sha512-d+JFcLM17njZaOLkv6SCev7uoLaBtfK86vMUXhW1Z4glPWh4jozno9APvW/XKFJ3CCxVoC7OL38BqRydtu5nGg=="],
|
||||||
|
|
||||||
"resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
|
"resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
|
||||||
|
|
||||||
@@ -180,7 +182,7 @@
|
|||||||
|
|
||||||
"ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="],
|
"ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="],
|
||||||
|
|
||||||
"zod": ["zod@4.1.13", "", {}, "sha512-AvvthqfqrAhNH9dnfmrfKzX5upOdjUVJYFqNSlkmGf64gRaTzlPwz99IHYnVs28qYAybvAlBV+H7pn0saFY4Ig=="],
|
"zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
|
||||||
|
|
||||||
"@discordjs/rest/@discordjs/collection": ["@discordjs/collection@2.1.1", "", {}, "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg=="],
|
"@discordjs/rest/@discordjs/collection": ["@discordjs/collection@2.1.1", "", {}, "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg=="],
|
||||||
|
|
||||||
|
|||||||
81
docker-compose.prod.yml
Normal file
81
docker-compose.prod.yml
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
# Production Docker Compose Configuration
|
||||||
|
# Usage: docker compose -f docker-compose.prod.yml up -d
|
||||||
|
#
|
||||||
|
# IMPORTANT: Database data is preserved in ./shared/db/data volume
|
||||||
|
|
||||||
|
services:
|
||||||
|
db:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
container_name: aurora_db
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
- POSTGRES_USER=${DB_USER}
|
||||||
|
- POSTGRES_PASSWORD=${DB_PASSWORD}
|
||||||
|
- POSTGRES_DB=${DB_NAME}
|
||||||
|
volumes:
|
||||||
|
# Database data - persisted across container rebuilds
|
||||||
|
- ./shared/db/data:/var/lib/postgresql/data
|
||||||
|
- ./shared/db/log:/var/log/postgresql
|
||||||
|
networks:
|
||||||
|
- internal
|
||||||
|
healthcheck:
|
||||||
|
test: [ "CMD-SHELL", "pg_isready -U ${DB_USER} -d ${DB_NAME}" ]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
# Security: limit resources
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
memory: 512M
|
||||||
|
|
||||||
|
app:
|
||||||
|
container_name: aurora_app
|
||||||
|
restart: unless-stopped
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile.prod
|
||||||
|
target: production
|
||||||
|
image: aurora-app:latest
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:3000:3000"
|
||||||
|
|
||||||
|
working_dir: /app
|
||||||
|
environment:
|
||||||
|
- NODE_ENV=production
|
||||||
|
- HOST=0.0.0.0
|
||||||
|
- DB_USER=${DB_USER}
|
||||||
|
- DB_PASSWORD=${DB_PASSWORD}
|
||||||
|
- DB_NAME=${DB_NAME}
|
||||||
|
- DB_PORT=5432
|
||||||
|
- DB_HOST=db
|
||||||
|
- DISCORD_BOT_TOKEN=${DISCORD_BOT_TOKEN}
|
||||||
|
- DISCORD_GUILD_ID=${DISCORD_GUILD_ID}
|
||||||
|
- DISCORD_CLIENT_ID=${DISCORD_CLIENT_ID}
|
||||||
|
- DATABASE_URL=postgresql://${DB_USER}:${DB_PASSWORD}@db:5432/${DB_NAME}
|
||||||
|
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
|
||||||
|
networks:
|
||||||
|
- internal
|
||||||
|
- web
|
||||||
|
# Security: limit resources
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
memory: 1G
|
||||||
|
# Logging configuration
|
||||||
|
logging:
|
||||||
|
driver: "json-file"
|
||||||
|
options:
|
||||||
|
max-size: "10m"
|
||||||
|
max-file: "3"
|
||||||
|
|
||||||
|
networks:
|
||||||
|
internal:
|
||||||
|
driver: bridge
|
||||||
|
internal: true # No external access - DB isolated
|
||||||
|
web:
|
||||||
|
driver: bridge # App accessible from host (via reverse proxy)
|
||||||
@@ -7,13 +7,14 @@ services:
|
|||||||
- POSTGRES_PASSWORD=${DB_PASSWORD}
|
- POSTGRES_PASSWORD=${DB_PASSWORD}
|
||||||
- POSTGRES_DB=${DB_NAME}
|
- POSTGRES_DB=${DB_NAME}
|
||||||
# Uncomment to access DB from host (for debugging/drizzle-kit studio)
|
# Uncomment to access DB from host (for debugging/drizzle-kit studio)
|
||||||
# ports:
|
ports:
|
||||||
# - "127.0.0.1:${DB_PORT}:5432"
|
- "127.0.0.1:${DB_PORT}:5432"
|
||||||
volumes:
|
volumes:
|
||||||
|
# Host-mounted to preserve existing VPS data
|
||||||
- ./shared/db/data:/var/lib/postgresql/data
|
- ./shared/db/data:/var/lib/postgresql/data
|
||||||
- ./shared/db/log:/var/log/postgresql
|
|
||||||
networks:
|
networks:
|
||||||
- internal
|
- internal
|
||||||
|
- web
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: [ "CMD-SHELL", "pg_isready -U ${DB_USER} -d ${DB_NAME}" ]
|
test: [ "CMD-SHELL", "pg_isready -U ${DB_USER} -d ${DB_NAME}" ]
|
||||||
interval: 5s
|
interval: 5s
|
||||||
@@ -23,17 +24,19 @@ services:
|
|||||||
app:
|
app:
|
||||||
container_name: aurora_app
|
container_name: aurora_app
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
image: aurora-app
|
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: Dockerfile
|
dockerfile: Dockerfile
|
||||||
|
target: development # Use development stage
|
||||||
working_dir: /app
|
working_dir: /app
|
||||||
ports:
|
ports:
|
||||||
- "127.0.0.1:3000:3000"
|
- "127.0.0.1:3000:3000"
|
||||||
volumes:
|
volumes:
|
||||||
|
# Mount source code for hot reloading
|
||||||
- .:/app
|
- .:/app
|
||||||
- /app/node_modules
|
# Use named volumes for node_modules (prevents host overwrite + caches deps)
|
||||||
- /app/web/node_modules
|
- app_node_modules:/app/node_modules
|
||||||
|
- web_node_modules:/app/web/node_modules
|
||||||
environment:
|
environment:
|
||||||
- HOST=0.0.0.0
|
- HOST=0.0.0.0
|
||||||
- DB_USER=${DB_USER}
|
- DB_USER=${DB_USER}
|
||||||
@@ -61,30 +64,21 @@ services:
|
|||||||
|
|
||||||
studio:
|
studio:
|
||||||
container_name: aurora_studio
|
container_name: aurora_studio
|
||||||
image: aurora-app
|
# Reuse the same built image as app (no duplicate builds!)
|
||||||
build:
|
extends:
|
||||||
context: .
|
service: app
|
||||||
dockerfile: Dockerfile
|
# Clear inherited ports from app and only expose studio port
|
||||||
working_dir: /app
|
ports: !override
|
||||||
ports:
|
|
||||||
- "127.0.0.1:4983:4983"
|
- "127.0.0.1:4983:4983"
|
||||||
volumes:
|
# Override healthcheck since studio doesn't serve on port 3000
|
||||||
- .:/app
|
healthcheck:
|
||||||
- /app/node_modules
|
test: [ "CMD", "bun", "-e", "fetch('http://localhost:4983').then(r => process.exit(0)).catch(() => process.exit(1))" ]
|
||||||
- /app/web/node_modules
|
interval: 30s
|
||||||
environment:
|
timeout: 10s
|
||||||
- DB_USER=${DB_USER}
|
retries: 3
|
||||||
- DB_PASSWORD=${DB_PASSWORD}
|
start_period: 10s
|
||||||
- DB_NAME=${DB_NAME}
|
# Disable restart for studio (it's an on-demand tool)
|
||||||
- DB_PORT=5432
|
restart: "no"
|
||||||
- DB_HOST=db
|
|
||||||
- DATABASE_URL=postgresql://${DB_USER}:${DB_PASSWORD}@db:5432/${DB_NAME}
|
|
||||||
depends_on:
|
|
||||||
db:
|
|
||||||
condition: service_healthy
|
|
||||||
networks:
|
|
||||||
- internal
|
|
||||||
- web
|
|
||||||
command: [ "bun", "x", "drizzle-kit", "studio", "--port", "4983", "--host", "0.0.0.0" ]
|
command: [ "bun", "x", "drizzle-kit", "studio", "--port", "4983", "--host", "0.0.0.0" ]
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
@@ -93,3 +87,10 @@ networks:
|
|||||||
internal: true # No external access
|
internal: true # No external access
|
||||||
web:
|
web:
|
||||||
driver: bridge # Can be accessed from host
|
driver: bridge # Can be accessed from host
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
# Named volumes for node_modules caching
|
||||||
|
app_node_modules:
|
||||||
|
name: aurora_app_node_modules
|
||||||
|
web_node_modules:
|
||||||
|
name: aurora_web_node_modules
|
||||||
|
|||||||
17
package.json
17
package.json
@@ -6,10 +6,10 @@
|
|||||||
"private": true,
|
"private": true,
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/bun": "latest",
|
"@types/bun": "latest",
|
||||||
"drizzle-kit": "^0.31.7"
|
"drizzle-kit": "^0.31.8"
|
||||||
},
|
},
|
||||||
"peerDependencies": {
|
"peerDependencies": {
|
||||||
"typescript": "^5"
|
"typescript": "^5.9.3"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"generate": "docker compose run --rm app drizzle-kit generate",
|
"generate": "docker compose run --rm app drizzle-kit generate",
|
||||||
@@ -18,17 +18,18 @@
|
|||||||
"db:push:local": "drizzle-kit push",
|
"db:push:local": "drizzle-kit push",
|
||||||
"dev": "bun --watch bot/index.ts",
|
"dev": "bun --watch bot/index.ts",
|
||||||
"db:studio": "drizzle-kit studio --port 4983 --host 0.0.0.0",
|
"db:studio": "drizzle-kit studio --port 4983 --host 0.0.0.0",
|
||||||
"studio:remote": "bash shared/scripts/remote-studio.sh",
|
|
||||||
"dashboard:remote": "bash shared/scripts/remote-dashboard.sh",
|
|
||||||
"remote": "bash shared/scripts/remote.sh",
|
"remote": "bash shared/scripts/remote.sh",
|
||||||
"test": "bun test"
|
"logs": "bash shared/scripts/logs.sh",
|
||||||
|
"db:backup": "bash shared/scripts/db-backup.sh",
|
||||||
|
"test": "bun test",
|
||||||
|
"docker:cleanup": "bash shared/scripts/docker-cleanup.sh"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@napi-rs/canvas": "^0.1.84",
|
"@napi-rs/canvas": "^0.1.89",
|
||||||
"discord.js": "^14.25.1",
|
"discord.js": "^14.25.1",
|
||||||
"dotenv": "^17.2.3",
|
"dotenv": "^17.2.3",
|
||||||
"drizzle-orm": "^0.44.7",
|
"drizzle-orm": "^0.44.7",
|
||||||
"postgres": "^3.4.7",
|
"postgres": "^3.4.8",
|
||||||
"zod": "^4.1.13"
|
"zod": "^4.3.6"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1,13 +1,18 @@
|
|||||||
import { drizzle } from "drizzle-orm/bun-sql";
|
import { drizzle } from "drizzle-orm/postgres-js";
|
||||||
import { SQL } from "bun";
|
import postgresJs from "postgres"; // Renamed import
|
||||||
import * as schema from "./schema";
|
import * as schema from "./schema";
|
||||||
import { env } from "@shared/lib/env";
|
import { env } from "@shared/lib/env";
|
||||||
|
|
||||||
const connectionString = env.DATABASE_URL;
|
const connectionString = env.DATABASE_URL;
|
||||||
export const postgres = new SQL(connectionString);
|
|
||||||
|
|
||||||
export const DrizzleClient = drizzle(postgres, { schema });
|
// Disable prefetch to prevent connection handling issues in serverless/container environments
|
||||||
|
const client = postgresJs(connectionString, { prepare: false });
|
||||||
|
|
||||||
|
export const DrizzleClient = drizzle(client, { schema });
|
||||||
|
|
||||||
|
// Export the raw client as 'postgres' to match previous Bun.SQL export name/usage
|
||||||
|
export const postgres = client;
|
||||||
|
|
||||||
export const closeDatabase = async () => {
|
export const closeDatabase = async () => {
|
||||||
await postgres.close();
|
await client.end();
|
||||||
};
|
};
|
||||||
@@ -1,42 +1,43 @@
|
|||||||
import { expect, test, describe } from "bun:test";
|
import { expect, test, describe } from "bun:test";
|
||||||
import { postgres } from "./DrizzleClient";
|
import { DrizzleClient } from "./DrizzleClient";
|
||||||
|
import { sql } from "drizzle-orm";
|
||||||
|
|
||||||
describe("Database Indexes", () => {
|
describe("Database Indexes", () => {
|
||||||
test("should have indexes on users table", async () => {
|
test("should have indexes on users table", async () => {
|
||||||
const result = await postgres`
|
const result = await DrizzleClient.execute(sql`
|
||||||
SELECT indexname FROM pg_indexes
|
SELECT indexname FROM pg_indexes
|
||||||
WHERE tablename = 'users'
|
WHERE tablename = 'users'
|
||||||
`;
|
`);
|
||||||
const indexNames = (result as unknown as { indexname: string }[]).map(r => r.indexname);
|
const indexNames = result.map(r => r.indexname);
|
||||||
expect(indexNames).toContain("users_balance_idx");
|
expect(indexNames).toContain("users_balance_idx");
|
||||||
expect(indexNames).toContain("users_level_xp_idx");
|
expect(indexNames).toContain("users_level_xp_idx");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("should have index on transactions table", async () => {
|
test("should have index on transactions table", async () => {
|
||||||
const result = await postgres`
|
const result = await DrizzleClient.execute(sql`
|
||||||
SELECT indexname FROM pg_indexes
|
SELECT indexname FROM pg_indexes
|
||||||
WHERE tablename = 'transactions'
|
WHERE tablename = 'transactions'
|
||||||
`;
|
`);
|
||||||
const indexNames = (result as unknown as { indexname: string }[]).map(r => r.indexname);
|
const indexNames = result.map(r => r.indexname);
|
||||||
expect(indexNames).toContain("transactions_created_at_idx");
|
expect(indexNames).toContain("transactions_created_at_idx");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("should have indexes on moderation_cases table", async () => {
|
test("should have indexes on moderation_cases table", async () => {
|
||||||
const result = await postgres`
|
const result = await DrizzleClient.execute(sql`
|
||||||
SELECT indexname FROM pg_indexes
|
SELECT indexname FROM pg_indexes
|
||||||
WHERE tablename = 'moderation_cases'
|
WHERE tablename = 'moderation_cases'
|
||||||
`;
|
`);
|
||||||
const indexNames = (result as unknown as { indexname: string }[]).map(r => r.indexname);
|
const indexNames = result.map(r => r.indexname);
|
||||||
expect(indexNames).toContain("moderation_cases_user_id_idx");
|
expect(indexNames).toContain("moderation_cases_user_id_idx");
|
||||||
expect(indexNames).toContain("moderation_cases_case_id_idx");
|
expect(indexNames).toContain("moderation_cases_case_id_idx");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("should have indexes on user_timers table", async () => {
|
test("should have indexes on user_timers table", async () => {
|
||||||
const result = await postgres`
|
const result = await DrizzleClient.execute(sql`
|
||||||
SELECT indexname FROM pg_indexes
|
SELECT indexname FROM pg_indexes
|
||||||
WHERE tablename = 'user_timers'
|
WHERE tablename = 'user_timers'
|
||||||
`;
|
`);
|
||||||
const indexNames = (result as unknown as { indexname: string }[]).map(r => r.indexname);
|
const indexNames = result.map(r => r.indexname);
|
||||||
expect(indexNames).toContain("user_timers_expires_at_idx");
|
expect(indexNames).toContain("user_timers_expires_at_idx");
|
||||||
expect(indexNames).toContain("user_timers_lookup_idx");
|
expect(indexNames).toContain("user_timers_lookup_idx");
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,248 +0,0 @@
|
|||||||
import { describe, expect, test, mock, beforeEach, afterAll, spyOn } from "bun:test";
|
|
||||||
import * as fs from "fs/promises";
|
|
||||||
|
|
||||||
// Mock child_process BEFORE importing the service
|
|
||||||
const mockExec = mock((cmd: string, callback?: any) => {
|
|
||||||
// Handle calls without callback (like exec().unref())
|
|
||||||
if (!callback) {
|
|
||||||
return { unref: () => { } };
|
|
||||||
}
|
|
||||||
|
|
||||||
if (cmd.includes("git rev-parse")) {
|
|
||||||
callback(null, { stdout: "main\n" });
|
|
||||||
} else if (cmd.includes("git fetch")) {
|
|
||||||
callback(null, { stdout: "" });
|
|
||||||
} else if (cmd.includes("git log")) {
|
|
||||||
callback(null, { stdout: "abcdef Update 1\n123456 Update 2" });
|
|
||||||
} else if (cmd.includes("git diff")) {
|
|
||||||
callback(null, { stdout: "package.json\nsrc/index.ts" });
|
|
||||||
} else if (cmd.includes("git reset")) {
|
|
||||||
callback(null, { stdout: "HEAD is now at abcdef Update 1" });
|
|
||||||
} else if (cmd.includes("bun install")) {
|
|
||||||
callback(null, { stdout: "Installed dependencies" });
|
|
||||||
} else if (cmd.includes("drizzle-kit migrate")) {
|
|
||||||
callback(null, { stdout: "Migrations applied" });
|
|
||||||
} else {
|
|
||||||
callback(null, { stdout: "" });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
mock.module("child_process", () => ({
|
|
||||||
exec: mockExec
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock fs/promises
|
|
||||||
const mockWriteFile = mock((path: string, content: string) => Promise.resolve());
|
|
||||||
const mockReadFile = mock((path: string, encoding: string) => Promise.resolve("{}"));
|
|
||||||
const mockUnlink = mock((path: string) => Promise.resolve());
|
|
||||||
|
|
||||||
mock.module("fs/promises", () => ({
|
|
||||||
writeFile: mockWriteFile,
|
|
||||||
readFile: mockReadFile,
|
|
||||||
unlink: mockUnlink
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock view module to avoid import issues
|
|
||||||
mock.module("./update.view", () => ({
|
|
||||||
getPostRestartEmbed: () => ({ title: "Update Complete" }),
|
|
||||||
getInstallingDependenciesEmbed: () => ({ title: "Installing..." }),
|
|
||||||
}));
|
|
||||||
|
|
||||||
describe("UpdateService", () => {
|
|
||||||
let UpdateService: any;
|
|
||||||
|
|
||||||
beforeEach(async () => {
|
|
||||||
mockExec.mockClear();
|
|
||||||
mockWriteFile.mockClear();
|
|
||||||
mockReadFile.mockClear();
|
|
||||||
mockUnlink.mockClear();
|
|
||||||
|
|
||||||
// Dynamically import to ensure mock is used
|
|
||||||
const module = await import("./update.service");
|
|
||||||
UpdateService = module.UpdateService;
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
mock.restore();
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("checkForUpdates", () => {
|
|
||||||
test("should return updates if git log has output", async () => {
|
|
||||||
const result = await UpdateService.checkForUpdates();
|
|
||||||
|
|
||||||
expect(result.hasUpdates).toBe(true);
|
|
||||||
expect(result.branch).toBe("main");
|
|
||||||
expect(result.log).toContain("Update 1");
|
|
||||||
});
|
|
||||||
|
|
||||||
test("should call git rev-parse, fetch, and log commands", async () => {
|
|
||||||
await UpdateService.checkForUpdates();
|
|
||||||
|
|
||||||
const calls = mockExec.mock.calls.map((c: any) => c[0]);
|
|
||||||
expect(calls.some((cmd: string) => cmd.includes("git rev-parse"))).toBe(true);
|
|
||||||
expect(calls.some((cmd: string) => cmd.includes("git fetch"))).toBe(true);
|
|
||||||
expect(calls.some((cmd: string) => cmd.includes("git log"))).toBe(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("performUpdate", () => {
|
|
||||||
test("should run git reset --hard with correct branch", async () => {
|
|
||||||
await UpdateService.performUpdate("main");
|
|
||||||
|
|
||||||
const lastCall = mockExec.mock.lastCall;
|
|
||||||
expect(lastCall).toBeDefined();
|
|
||||||
expect(lastCall![0]).toContain("git reset --hard origin/main");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("checkUpdateRequirements", () => {
|
|
||||||
test("should detect package.json and schema.ts changes", async () => {
|
|
||||||
const result = await UpdateService.checkUpdateRequirements("main");
|
|
||||||
|
|
||||||
expect(result.needsInstall).toBe(true);
|
|
||||||
expect(result.needsMigrations).toBe(false); // mock doesn't include schema.ts
|
|
||||||
expect(result.error).toBeUndefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
test("should call git diff with correct branch", async () => {
|
|
||||||
await UpdateService.checkUpdateRequirements("develop");
|
|
||||||
|
|
||||||
const lastCall = mockExec.mock.lastCall;
|
|
||||||
expect(lastCall).toBeDefined();
|
|
||||||
expect(lastCall![0]).toContain("git diff HEAD..origin/develop");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("installDependencies", () => {
|
|
||||||
test("should run bun install and return output", async () => {
|
|
||||||
const output = await UpdateService.installDependencies();
|
|
||||||
|
|
||||||
expect(output).toBe("Installed dependencies");
|
|
||||||
const lastCall = mockExec.mock.lastCall;
|
|
||||||
expect(lastCall![0]).toBe("bun install");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("prepareRestartContext", () => {
|
|
||||||
test("should write context to file", async () => {
|
|
||||||
const context = {
|
|
||||||
channelId: "123",
|
|
||||||
userId: "456",
|
|
||||||
timestamp: Date.now(),
|
|
||||||
runMigrations: true,
|
|
||||||
installDependencies: false
|
|
||||||
};
|
|
||||||
|
|
||||||
await UpdateService.prepareRestartContext(context);
|
|
||||||
|
|
||||||
expect(mockWriteFile).toHaveBeenCalled();
|
|
||||||
const lastCall = mockWriteFile.mock.lastCall as [string, string] | undefined;
|
|
||||||
expect(lastCall).toBeDefined();
|
|
||||||
expect(lastCall![0]).toContain("restart_context");
|
|
||||||
expect(JSON.parse(lastCall![1])).toEqual(context);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("triggerRestart", () => {
|
|
||||||
test("should use RESTART_COMMAND env var when set", async () => {
|
|
||||||
const originalEnv = process.env.RESTART_COMMAND;
|
|
||||||
process.env.RESTART_COMMAND = "pm2 restart bot";
|
|
||||||
|
|
||||||
await UpdateService.triggerRestart();
|
|
||||||
|
|
||||||
const lastCall = mockExec.mock.lastCall;
|
|
||||||
expect(lastCall).toBeDefined();
|
|
||||||
expect(lastCall![0]).toBe("pm2 restart bot");
|
|
||||||
|
|
||||||
process.env.RESTART_COMMAND = originalEnv;
|
|
||||||
});
|
|
||||||
|
|
||||||
test("should write to trigger file when no env var", async () => {
|
|
||||||
const originalEnv = process.env.RESTART_COMMAND;
|
|
||||||
delete process.env.RESTART_COMMAND;
|
|
||||||
|
|
||||||
await UpdateService.triggerRestart();
|
|
||||||
|
|
||||||
expect(mockWriteFile).toHaveBeenCalled();
|
|
||||||
const lastCall = mockWriteFile.mock.lastCall as [string, string] | undefined;
|
|
||||||
expect(lastCall).toBeDefined();
|
|
||||||
expect(lastCall![0]).toContain("restart_trigger");
|
|
||||||
|
|
||||||
process.env.RESTART_COMMAND = originalEnv;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("handlePostRestart", () => {
|
|
||||||
const createMockClient = (channel: any = null) => ({
|
|
||||||
channels: {
|
|
||||||
fetch: mock(() => Promise.resolve(channel))
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const createMockChannel = () => ({
|
|
||||||
isSendable: () => true,
|
|
||||||
send: mock(() => Promise.resolve())
|
|
||||||
});
|
|
||||||
|
|
||||||
test("should ignore stale context (>10 mins old)", async () => {
|
|
||||||
const staleContext = {
|
|
||||||
channelId: "123",
|
|
||||||
userId: "456",
|
|
||||||
timestamp: Date.now() - (15 * 60 * 1000), // 15 mins ago
|
|
||||||
runMigrations: true,
|
|
||||||
installDependencies: true
|
|
||||||
};
|
|
||||||
|
|
||||||
mockReadFile.mockImplementationOnce(() => Promise.resolve(JSON.stringify(staleContext)));
|
|
||||||
|
|
||||||
const mockChannel = createMockChannel();
|
|
||||||
// Create mock with instanceof support
|
|
||||||
const channel = Object.assign(mockChannel, { constructor: { name: "TextChannel" } });
|
|
||||||
Object.setPrototypeOf(channel, Object.create({ constructor: { name: "TextChannel" } }));
|
|
||||||
|
|
||||||
const mockClient = createMockClient(channel);
|
|
||||||
|
|
||||||
await UpdateService.handlePostRestart(mockClient);
|
|
||||||
|
|
||||||
// Should not send any message for stale context
|
|
||||||
expect(mockChannel.send).not.toHaveBeenCalled();
|
|
||||||
// Should clean up the context file
|
|
||||||
expect(mockUnlink).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
test("should do nothing if no context file exists", async () => {
|
|
||||||
mockReadFile.mockImplementationOnce(() => Promise.reject(new Error("ENOENT")));
|
|
||||||
|
|
||||||
const mockClient = createMockClient();
|
|
||||||
|
|
||||||
await UpdateService.handlePostRestart(mockClient);
|
|
||||||
|
|
||||||
// Should not throw and not try to clean up
|
|
||||||
expect(mockUnlink).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
test("should clean up context file after processing", async () => {
|
|
||||||
const validContext = {
|
|
||||||
channelId: "123",
|
|
||||||
userId: "456",
|
|
||||||
timestamp: Date.now(),
|
|
||||||
runMigrations: false,
|
|
||||||
installDependencies: false
|
|
||||||
};
|
|
||||||
|
|
||||||
mockReadFile.mockImplementationOnce(() => Promise.resolve(JSON.stringify(validContext)));
|
|
||||||
|
|
||||||
// Create a proper TextChannel mock
|
|
||||||
const { TextChannel } = await import("discord.js");
|
|
||||||
const mockChannel = Object.create(TextChannel.prototype);
|
|
||||||
mockChannel.isSendable = () => true;
|
|
||||||
mockChannel.send = mock(() => Promise.resolve());
|
|
||||||
|
|
||||||
const mockClient = createMockClient(mockChannel);
|
|
||||||
|
|
||||||
await UpdateService.handlePostRestart(mockClient);
|
|
||||||
|
|
||||||
expect(mockUnlink).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,385 +0,0 @@
|
|||||||
import { exec } from "child_process";
|
|
||||||
import { promisify } from "util";
|
|
||||||
import { writeFile, readFile, unlink } from "fs/promises";
|
|
||||||
import { Client, TextChannel } from "discord.js";
|
|
||||||
import { getPostRestartEmbed, getPostRestartProgressEmbed, type PostRestartProgress } from "@/modules/admin/update.view";
|
|
||||||
import type { PostRestartResult } from "@/modules/admin/update.view";
|
|
||||||
import type { RestartContext, UpdateCheckResult, UpdateInfo, CommitInfo } from "@/modules/admin/update.types";
|
|
||||||
|
|
||||||
const execAsync = promisify(exec);
|
|
||||||
|
|
||||||
// Constants
|
|
||||||
const STALE_CONTEXT_MS = 10 * 60 * 1000; // 10 minutes
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
export class UpdateService {
|
|
||||||
private static readonly CONTEXT_FILE = ".restart_context.json";
|
|
||||||
private static readonly ROLLBACK_FILE = ".rollback_commit.txt";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check for available updates with detailed commit information
|
|
||||||
*/
|
|
||||||
static async checkForUpdates(): Promise<UpdateInfo> {
|
|
||||||
const { stdout: branchName } = await execAsync("git rev-parse --abbrev-ref HEAD");
|
|
||||||
const branch = branchName.trim();
|
|
||||||
|
|
||||||
const { stdout: currentCommit } = await execAsync("git rev-parse --short HEAD");
|
|
||||||
|
|
||||||
await execAsync("git fetch --all");
|
|
||||||
|
|
||||||
const { stdout: latestCommit } = await execAsync(`git rev-parse --short origin/${branch}`);
|
|
||||||
|
|
||||||
// Get commit log with author info
|
|
||||||
const { stdout: logOutput } = await execAsync(
|
|
||||||
`git log HEAD..origin/${branch} --format="%h|%s|%an" --no-merges`
|
|
||||||
);
|
|
||||||
|
|
||||||
const commits: CommitInfo[] = logOutput
|
|
||||||
.trim()
|
|
||||||
.split("\n")
|
|
||||||
.filter(line => line.length > 0)
|
|
||||||
.map(line => {
|
|
||||||
const [hash, message, author] = line.split("|");
|
|
||||||
return { hash: hash || "", message: message || "", author: author || "" };
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
hasUpdates: commits.length > 0,
|
|
||||||
branch,
|
|
||||||
currentCommit: currentCommit.trim(),
|
|
||||||
latestCommit: latestCommit.trim(),
|
|
||||||
commitCount: commits.length,
|
|
||||||
commits
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Analyze what the update requires
|
|
||||||
*/
|
|
||||||
static async checkUpdateRequirements(branch: string): Promise<UpdateCheckResult> {
|
|
||||||
try {
|
|
||||||
const { stdout } = await execAsync(`git diff HEAD..origin/${branch} --name-only`);
|
|
||||||
const changedFiles = stdout.trim().split("\n").filter(f => f.length > 0);
|
|
||||||
|
|
||||||
const needsRootInstall = changedFiles.some(file =>
|
|
||||||
file === "package.json" || file === "bun.lock"
|
|
||||||
);
|
|
||||||
|
|
||||||
const needsWebInstall = changedFiles.some(file =>
|
|
||||||
file === "web/package.json" || file === "web/bun.lock"
|
|
||||||
);
|
|
||||||
|
|
||||||
// Detect if web source files changed (requires rebuild)
|
|
||||||
const needsWebBuild = changedFiles.some(file =>
|
|
||||||
file.startsWith("web/src/") ||
|
|
||||||
file === "web/build.ts" ||
|
|
||||||
file === "web/tailwind.config.ts" ||
|
|
||||||
file === "web/tsconfig.json"
|
|
||||||
);
|
|
||||||
|
|
||||||
const needsMigrations = changedFiles.some(file =>
|
|
||||||
file.includes("schema.ts") || file.startsWith("drizzle/")
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
|
||||||
needsRootInstall,
|
|
||||||
needsWebInstall,
|
|
||||||
needsWebBuild,
|
|
||||||
needsMigrations,
|
|
||||||
changedFiles
|
|
||||||
};
|
|
||||||
} catch (e) {
|
|
||||||
console.error("Failed to check update requirements:", e);
|
|
||||||
return {
|
|
||||||
needsRootInstall: false,
|
|
||||||
needsWebInstall: false,
|
|
||||||
needsWebBuild: false,
|
|
||||||
needsMigrations: false,
|
|
||||||
changedFiles: [],
|
|
||||||
error: e instanceof Error ? e : new Error(String(e))
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a summary of changed file categories
|
|
||||||
*/
|
|
||||||
static categorizeChanges(changedFiles: string[]): Record<string, number> {
|
|
||||||
const categories: Record<string, number> = {};
|
|
||||||
|
|
||||||
for (const file of changedFiles) {
|
|
||||||
let category = "Other";
|
|
||||||
|
|
||||||
if (file.startsWith("bot/commands/")) category = "Commands";
|
|
||||||
else if (file.startsWith("bot/modules/")) category = "Modules";
|
|
||||||
else if (file.startsWith("web/")) category = "Web Dashboard";
|
|
||||||
else if (file.startsWith("bot/lib/") || file.startsWith("shared/lib/")) category = "Library";
|
|
||||||
else if (file.startsWith("drizzle/") || file.includes("schema")) category = "Database";
|
|
||||||
else if (file.endsWith(".test.ts")) category = "Tests";
|
|
||||||
else if (file.includes("package.json") || file.includes("lock")) category = "Dependencies";
|
|
||||||
|
|
||||||
categories[category] = (categories[category] || 0) + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
return categories;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Save the current commit for potential rollback
|
|
||||||
*/
|
|
||||||
static async saveRollbackPoint(): Promise<string> {
|
|
||||||
const { stdout } = await execAsync("git rev-parse HEAD");
|
|
||||||
const commit = stdout.trim();
|
|
||||||
await writeFile(this.ROLLBACK_FILE, commit);
|
|
||||||
return commit;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Rollback to the previous commit
|
|
||||||
*/
|
|
||||||
static async rollback(): Promise<{ success: boolean; message: string }> {
|
|
||||||
try {
|
|
||||||
const rollbackCommit = await readFile(this.ROLLBACK_FILE, "utf-8");
|
|
||||||
await execAsync(`git reset --hard ${rollbackCommit.trim()}`);
|
|
||||||
await unlink(this.ROLLBACK_FILE);
|
|
||||||
return { success: true, message: `Rolled back to ${rollbackCommit.trim().substring(0, 7)}` };
|
|
||||||
} catch (e) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
message: e instanceof Error ? e.message : "No rollback point available"
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if a rollback point exists
|
|
||||||
*/
|
|
||||||
static async hasRollbackPoint(): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
await readFile(this.ROLLBACK_FILE, "utf-8");
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Perform the git update
|
|
||||||
*/
|
|
||||||
static async performUpdate(branch: string): Promise<void> {
|
|
||||||
await execAsync(`git reset --hard origin/${branch}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Install dependencies for specified projects
|
|
||||||
*/
|
|
||||||
static async installDependencies(options: { root: boolean; web: boolean }): Promise<string> {
|
|
||||||
const outputs: string[] = [];
|
|
||||||
|
|
||||||
if (options.root) {
|
|
||||||
const { stdout } = await execAsync("bun install");
|
|
||||||
outputs.push(`📦 Root: ${stdout.trim() || "Done"}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (options.web) {
|
|
||||||
const { stdout } = await execAsync("cd web && bun install");
|
|
||||||
outputs.push(`🌐 Web: ${stdout.trim() || "Done"}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return outputs.join("\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Prepare restart context with rollback info
|
|
||||||
*/
|
|
||||||
static async prepareRestartContext(context: RestartContext): Promise<void> {
|
|
||||||
await writeFile(this.CONTEXT_FILE, JSON.stringify(context));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Trigger a restart
|
|
||||||
*/
|
|
||||||
static async triggerRestart(): Promise<void> {
|
|
||||||
if (process.env.RESTART_COMMAND) {
|
|
||||||
exec(process.env.RESTART_COMMAND).unref();
|
|
||||||
} else {
|
|
||||||
setTimeout(() => process.exit(0), 100);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle post-restart tasks
|
|
||||||
*/
|
|
||||||
static async handlePostRestart(client: Client): Promise<void> {
|
|
||||||
try {
|
|
||||||
const context = await this.loadRestartContext();
|
|
||||||
if (!context) return;
|
|
||||||
|
|
||||||
if (this.isContextStale(context)) {
|
|
||||||
await this.cleanupContext();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const channel = await this.fetchNotificationChannel(client, context.channelId);
|
|
||||||
if (!channel) {
|
|
||||||
await this.cleanupContext();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await this.executePostRestartTasks(context, channel);
|
|
||||||
await this.notifyPostRestartResult(channel, result, context);
|
|
||||||
await this.cleanupContext();
|
|
||||||
} catch (e) {
|
|
||||||
console.error("Failed to handle post-restart context:", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Private Helper Methods ---
|
|
||||||
|
|
||||||
private static async loadRestartContext(): Promise<RestartContext | null> {
|
|
||||||
try {
|
|
||||||
const contextData = await readFile(this.CONTEXT_FILE, "utf-8");
|
|
||||||
return JSON.parse(contextData) as RestartContext;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static isContextStale(context: RestartContext): boolean {
|
|
||||||
return Date.now() - context.timestamp > STALE_CONTEXT_MS;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static async fetchNotificationChannel(client: Client, channelId: string): Promise<TextChannel | null> {
|
|
||||||
try {
|
|
||||||
const channel = await client.channels.fetch(channelId);
|
|
||||||
if (channel && channel.isSendable() && channel instanceof TextChannel) {
|
|
||||||
return channel;
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private static async executePostRestartTasks(
|
|
||||||
context: RestartContext,
|
|
||||||
channel: TextChannel
|
|
||||||
): Promise<PostRestartResult> {
|
|
||||||
const result: PostRestartResult = {
|
|
||||||
installSuccess: true,
|
|
||||||
installOutput: "",
|
|
||||||
webBuildSuccess: true,
|
|
||||||
webBuildOutput: "",
|
|
||||||
migrationSuccess: true,
|
|
||||||
migrationOutput: "",
|
|
||||||
ranInstall: context.installDependencies,
|
|
||||||
ranWebBuild: context.buildWebAssets,
|
|
||||||
ranMigrations: context.runMigrations,
|
|
||||||
previousCommit: context.previousCommit,
|
|
||||||
newCommit: context.newCommit
|
|
||||||
};
|
|
||||||
|
|
||||||
// Track progress for consolidated message
|
|
||||||
const progress: PostRestartProgress = {
|
|
||||||
installDeps: context.installDependencies,
|
|
||||||
buildWeb: context.buildWebAssets,
|
|
||||||
runMigrations: context.runMigrations,
|
|
||||||
currentStep: "starting"
|
|
||||||
};
|
|
||||||
|
|
||||||
// Only send progress message if there are tasks to run
|
|
||||||
const hasTasks = context.installDependencies || context.buildWebAssets || context.runMigrations;
|
|
||||||
let progressMessage = hasTasks
|
|
||||||
? await channel.send({ embeds: [getPostRestartProgressEmbed(progress)] })
|
|
||||||
: null;
|
|
||||||
|
|
||||||
// Helper to update progress message
|
|
||||||
const updateProgress = async () => {
|
|
||||||
if (progressMessage) {
|
|
||||||
await progressMessage.edit({ embeds: [getPostRestartProgressEmbed(progress)] });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// 1. Install Dependencies if needed
|
|
||||||
if (context.installDependencies) {
|
|
||||||
try {
|
|
||||||
progress.currentStep = "install";
|
|
||||||
await updateProgress();
|
|
||||||
|
|
||||||
const { stdout: rootOutput } = await execAsync("bun install");
|
|
||||||
const { stdout: webOutput } = await execAsync("cd web && bun install");
|
|
||||||
|
|
||||||
result.installOutput = `📦 Root: ${rootOutput.trim() || "Done"}\n🌐 Web: ${webOutput.trim() || "Done"}`;
|
|
||||||
progress.installDone = true;
|
|
||||||
} catch (err: unknown) {
|
|
||||||
result.installSuccess = false;
|
|
||||||
result.installOutput = err instanceof Error ? err.message : String(err);
|
|
||||||
progress.installDone = true; // Mark as done even on failure
|
|
||||||
console.error("Dependency Install Failed:", err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Build Web Assets if needed
|
|
||||||
if (context.buildWebAssets) {
|
|
||||||
try {
|
|
||||||
progress.currentStep = "build";
|
|
||||||
await updateProgress();
|
|
||||||
|
|
||||||
const { stdout } = await execAsync("cd web && bun run build");
|
|
||||||
result.webBuildOutput = stdout.trim() || "Build completed successfully";
|
|
||||||
progress.buildDone = true;
|
|
||||||
} catch (err: unknown) {
|
|
||||||
result.webBuildSuccess = false;
|
|
||||||
result.webBuildOutput = err instanceof Error ? err.message : String(err);
|
|
||||||
progress.buildDone = true;
|
|
||||||
console.error("Web Build Failed:", err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. Run Migrations
|
|
||||||
if (context.runMigrations) {
|
|
||||||
try {
|
|
||||||
progress.currentStep = "migrate";
|
|
||||||
await updateProgress();
|
|
||||||
|
|
||||||
const { stdout } = await execAsync("bun x drizzle-kit migrate");
|
|
||||||
result.migrationOutput = stdout;
|
|
||||||
progress.migrateDone = true;
|
|
||||||
} catch (err: unknown) {
|
|
||||||
result.migrationSuccess = false;
|
|
||||||
result.migrationOutput = err instanceof Error ? err.message : String(err);
|
|
||||||
progress.migrateDone = true;
|
|
||||||
console.error("Migration Failed:", err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete progress message before final result
|
|
||||||
if (progressMessage) {
|
|
||||||
try {
|
|
||||||
await progressMessage.delete();
|
|
||||||
} catch {
|
|
||||||
// Message may already be deleted, ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static async notifyPostRestartResult(
|
|
||||||
channel: TextChannel,
|
|
||||||
result: PostRestartResult,
|
|
||||||
context: RestartContext
|
|
||||||
): Promise<void> {
|
|
||||||
const hasRollback = await this.hasRollbackPoint();
|
|
||||||
await channel.send(getPostRestartEmbed(result, hasRollback));
|
|
||||||
}
|
|
||||||
|
|
||||||
private static async cleanupContext(): Promise<void> {
|
|
||||||
try {
|
|
||||||
await unlink(this.CONTEXT_FILE);
|
|
||||||
} catch {
|
|
||||||
// File may not exist, ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -7,6 +7,8 @@ const mockLimit = mock();
|
|||||||
// Helper to support the chained calls in getLeaderboards
|
// Helper to support the chained calls in getLeaderboards
|
||||||
const mockChain = {
|
const mockChain = {
|
||||||
from: () => mockChain,
|
from: () => mockChain,
|
||||||
|
leftJoin: () => mockChain,
|
||||||
|
groupBy: () => mockChain,
|
||||||
orderBy: () => mockChain,
|
orderBy: () => mockChain,
|
||||||
limit: mockLimit
|
limit: mockLimit
|
||||||
};
|
};
|
||||||
@@ -75,7 +77,8 @@ describe("dashboardService", () => {
|
|||||||
// First call is topLevels, second is topWealth
|
// First call is topLevels, second is topWealth
|
||||||
mockLimit
|
mockLimit
|
||||||
.mockResolvedValueOnce(mockTopLevels)
|
.mockResolvedValueOnce(mockTopLevels)
|
||||||
.mockResolvedValueOnce(mockTopWealth);
|
.mockResolvedValueOnce(mockTopWealth)
|
||||||
|
.mockResolvedValueOnce(mockTopWealth); // Mock net worth same as wealth for simplicity
|
||||||
|
|
||||||
const result = await dashboardService.getLeaderboards();
|
const result = await dashboardService.getLeaderboards();
|
||||||
|
|
||||||
@@ -85,7 +88,7 @@ describe("dashboardService", () => {
|
|||||||
expect(result.topWealth[0]!.balance).toBe("1000");
|
expect(result.topWealth[0]!.balance).toBe("1000");
|
||||||
expect(result.topWealth[0]!.username).toBe("Alice");
|
expect(result.topWealth[0]!.username).toBe("Alice");
|
||||||
expect(result.topWealth[1]!.balance).toBe("500");
|
expect(result.topWealth[1]!.balance).toBe("500");
|
||||||
expect(mockLimit).toHaveBeenCalledTimes(2);
|
expect(mockLimit).toHaveBeenCalledTimes(3);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("should handle empty leaderboards", async () => {
|
test("should handle empty leaderboards", async () => {
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import { economyService } from "@shared/modules/economy/economy.service";
|
|||||||
import { users, userTimers, transactions } from "@db/schema";
|
import { users, userTimers, transactions } from "@db/schema";
|
||||||
|
|
||||||
// Define mock functions
|
// Define mock functions
|
||||||
const mockFindMany = mock();
|
const mockFindMany = mock(() => Promise.resolve([]));
|
||||||
const mockFindFirst = mock();
|
const mockFindFirst = mock();
|
||||||
const mockInsert = mock();
|
const mockInsert = mock();
|
||||||
const mockUpdate = mock();
|
const mockUpdate = mock();
|
||||||
@@ -33,6 +33,7 @@ mock.module("@shared/db/DrizzleClient", () => {
|
|||||||
query: {
|
query: {
|
||||||
users: { findFirst: mockFindFirst },
|
users: { findFirst: mockFindFirst },
|
||||||
userTimers: { findFirst: mockFindFirst },
|
userTimers: { findFirst: mockFindFirst },
|
||||||
|
userQuests: { findMany: mockFindMany },
|
||||||
},
|
},
|
||||||
insert: mockInsert,
|
insert: mockInsert,
|
||||||
update: mockUpdate,
|
update: mockUpdate,
|
||||||
@@ -173,7 +174,7 @@ describe("economyService", () => {
|
|||||||
it("should throw if cooldown is active", async () => {
|
it("should throw if cooldown is active", async () => {
|
||||||
const future = new Date("2023-01-02T12:00:00Z"); // +24h
|
const future = new Date("2023-01-02T12:00:00Z"); // +24h
|
||||||
mockFindFirst.mockResolvedValue({ expiresAt: future });
|
mockFindFirst.mockResolvedValue({ expiresAt: future });
|
||||||
expect(economyService.claimDaily("1")).rejects.toThrow("Daily already claimed");
|
expect(economyService.claimDaily("1")).rejects.toThrow("You have already claimed your daily reward today");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should set cooldown to next UTC midnight", async () => {
|
it("should set cooldown to next UTC midnight", async () => {
|
||||||
|
|||||||
@@ -48,6 +48,8 @@ mock.module("@shared/db/DrizzleClient", () => {
|
|||||||
inventory: { findFirst: mockFindFirst, findMany: mockFindMany },
|
inventory: { findFirst: mockFindFirst, findMany: mockFindMany },
|
||||||
items: { findFirst: mockFindFirst },
|
items: { findFirst: mockFindFirst },
|
||||||
userTimers: { findFirst: mockFindFirst },
|
userTimers: { findFirst: mockFindFirst },
|
||||||
|
userQuests: { findMany: mockFindMany, findFirst: mockFindFirst },
|
||||||
|
quests: { findMany: mockFindMany },
|
||||||
},
|
},
|
||||||
insert: mockInsert,
|
insert: mockInsert,
|
||||||
update: mockUpdate,
|
update: mockUpdate,
|
||||||
@@ -79,6 +81,7 @@ describe("inventoryService", () => {
|
|||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
mockFindFirst.mockReset();
|
mockFindFirst.mockReset();
|
||||||
mockFindMany.mockReset();
|
mockFindMany.mockReset();
|
||||||
|
mockFindMany.mockResolvedValue([]);
|
||||||
mockInsert.mockClear();
|
mockInsert.mockClear();
|
||||||
mockUpdate.mockClear();
|
mockUpdate.mockClear();
|
||||||
mockDelete.mockClear();
|
mockDelete.mockClear();
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { users, userTimers } from "@db/schema";
|
|||||||
|
|
||||||
// Mock dependencies
|
// Mock dependencies
|
||||||
const mockFindFirst = mock();
|
const mockFindFirst = mock();
|
||||||
|
const mockFindMany = mock(() => Promise.resolve([]));
|
||||||
const mockUpdate = mock();
|
const mockUpdate = mock();
|
||||||
const mockSet = mock();
|
const mockSet = mock();
|
||||||
const mockWhere = mock();
|
const mockWhere = mock();
|
||||||
@@ -24,8 +25,10 @@ mockOnConflictDoUpdate.mockResolvedValue({});
|
|||||||
mock.module("@shared/db/DrizzleClient", () => {
|
mock.module("@shared/db/DrizzleClient", () => {
|
||||||
const createMockTx = () => ({
|
const createMockTx = () => ({
|
||||||
query: {
|
query: {
|
||||||
|
|
||||||
users: { findFirst: mockFindFirst },
|
users: { findFirst: mockFindFirst },
|
||||||
userTimers: { findFirst: mockFindFirst },
|
userTimers: { findFirst: mockFindFirst },
|
||||||
|
userQuests: { findMany: mockFindMany },
|
||||||
},
|
},
|
||||||
update: mockUpdate,
|
update: mockUpdate,
|
||||||
insert: mockInsert,
|
insert: mockInsert,
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ mock.module("@shared/lib/config", () => ({
|
|||||||
|
|
||||||
// Mock View
|
// Mock View
|
||||||
const mockGetUserWarningEmbed = mock(() => ({}));
|
const mockGetUserWarningEmbed = mock(() => ({}));
|
||||||
mock.module("./moderation.view", () => ({
|
mock.module("@/modules/moderation/moderation.view", () => ({
|
||||||
getUserWarningEmbed: mockGetUserWarningEmbed
|
getUserWarningEmbed: mockGetUserWarningEmbed
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,83 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";
|
import { describe, it, expect, mock, beforeEach } from "bun:test";
|
||||||
import { triviaService } from "./trivia.service";
|
import { triviaService } from "./trivia.service";
|
||||||
import { DrizzleClient } from "@shared/db/DrizzleClient";
|
import { users, userTimers, transactions } from "@db/schema";
|
||||||
import { users, userTimers } from "@db/schema";
|
|
||||||
import { eq, and } from "drizzle-orm";
|
|
||||||
import { config } from "@shared/lib/config";
|
|
||||||
import { TimerType } from "@shared/lib/constants";
|
import { TimerType } from "@shared/lib/constants";
|
||||||
|
|
||||||
// Mock fetch for OpenTDB API
|
// Define mock functions
|
||||||
const mockFetch = mock(() => Promise.resolve({
|
const mockFindFirst = mock();
|
||||||
|
const mockFindMany = mock(() => Promise.resolve([]));
|
||||||
|
const mockInsert = mock();
|
||||||
|
const mockUpdate = mock();
|
||||||
|
const mockDelete = mock();
|
||||||
|
const mockValues = mock();
|
||||||
|
const mockReturning = mock();
|
||||||
|
const mockSet = mock();
|
||||||
|
const mockWhere = mock();
|
||||||
|
const mockOnConflictDoUpdate = mock();
|
||||||
|
const mockRecordEvent = mock(() => Promise.resolve());
|
||||||
|
|
||||||
|
// Chain setup
|
||||||
|
mockInsert.mockReturnValue({ values: mockValues });
|
||||||
|
mockValues.mockReturnValue({
|
||||||
|
returning: mockReturning,
|
||||||
|
onConflictDoUpdate: mockOnConflictDoUpdate
|
||||||
|
});
|
||||||
|
mockOnConflictDoUpdate.mockResolvedValue({});
|
||||||
|
|
||||||
|
mockUpdate.mockReturnValue({ set: mockSet });
|
||||||
|
mockSet.mockReturnValue({ where: mockWhere });
|
||||||
|
mockWhere.mockReturnValue({ returning: mockReturning });
|
||||||
|
|
||||||
|
// Mock DrizzleClient
|
||||||
|
mock.module("@shared/db/DrizzleClient", () => {
|
||||||
|
const createMockTx = () => ({
|
||||||
|
query: {
|
||||||
|
users: { findFirst: mockFindFirst },
|
||||||
|
userTimers: { findFirst: mockFindFirst },
|
||||||
|
userQuests: { findMany: mockFindMany },
|
||||||
|
},
|
||||||
|
insert: mockInsert,
|
||||||
|
update: mockUpdate,
|
||||||
|
delete: mockDelete,
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
DrizzleClient: {
|
||||||
|
query: {
|
||||||
|
users: { findFirst: mockFindFirst },
|
||||||
|
userTimers: { findFirst: mockFindFirst },
|
||||||
|
},
|
||||||
|
insert: mockInsert,
|
||||||
|
update: mockUpdate,
|
||||||
|
delete: mockDelete,
|
||||||
|
transaction: async (cb: any) => cb(createMockTx())
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// Mock Config
|
||||||
|
mock.module("@shared/lib/config", () => ({
|
||||||
|
config: {
|
||||||
|
trivia: {
|
||||||
|
entryFee: 50n,
|
||||||
|
rewardMultiplier: 2.0,
|
||||||
|
timeoutSeconds: 300,
|
||||||
|
cooldownMs: 60000,
|
||||||
|
categories: [9],
|
||||||
|
difficulty: 'medium'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock Dashboard Service
|
||||||
|
mock.module("@shared/modules/dashboard/dashboard.service", () => ({
|
||||||
|
dashboardService: {
|
||||||
|
recordEvent: mockRecordEvent
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock fetch for OpenTDB
|
||||||
|
global.fetch = mock(() => Promise.resolve({
|
||||||
json: () => Promise.resolve({
|
json: () => Promise.resolve({
|
||||||
response_code: 0,
|
response_code: 0,
|
||||||
results: [{
|
results: [{
|
||||||
@@ -23,39 +93,25 @@ const mockFetch = mock(() => Promise.resolve({
|
|||||||
]
|
]
|
||||||
}]
|
}]
|
||||||
})
|
})
|
||||||
}));
|
})) as any;
|
||||||
|
|
||||||
global.fetch = mockFetch as any;
|
|
||||||
|
|
||||||
describe("TriviaService", () => {
|
describe("TriviaService", () => {
|
||||||
const TEST_USER_ID = "999999999";
|
const TEST_USER_ID = "999999999";
|
||||||
const TEST_USERNAME = "testuser";
|
const TEST_USERNAME = "testuser";
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(() => {
|
||||||
// Clean up test data
|
mockFindFirst.mockReset();
|
||||||
await DrizzleClient.delete(userTimers)
|
mockInsert.mockClear();
|
||||||
.where(eq(userTimers.userId, BigInt(TEST_USER_ID)));
|
mockUpdate.mockClear();
|
||||||
|
mockDelete.mockClear();
|
||||||
// Ensure test user exists with sufficient balance
|
mockValues.mockClear();
|
||||||
await DrizzleClient.insert(users)
|
mockReturning.mockClear();
|
||||||
.values({
|
mockSet.mockClear();
|
||||||
id: BigInt(TEST_USER_ID),
|
mockWhere.mockClear();
|
||||||
username: TEST_USERNAME,
|
mockOnConflictDoUpdate.mockClear();
|
||||||
balance: 1000n,
|
mockRecordEvent.mockClear();
|
||||||
xp: 0n,
|
// Clear active sessions
|
||||||
})
|
(triviaService as any).activeSessions.clear();
|
||||||
.onConflictDoUpdate({
|
|
||||||
target: [users.id],
|
|
||||||
set: {
|
|
||||||
balance: 1000n,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(async () => {
|
|
||||||
// Clean up
|
|
||||||
await DrizzleClient.delete(userTimers)
|
|
||||||
.where(eq(userTimers.userId, BigInt(TEST_USER_ID)));
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("fetchQuestion", () => {
|
describe("fetchQuestion", () => {
|
||||||
@@ -66,176 +122,146 @@ describe("TriviaService", () => {
|
|||||||
expect(question.question).toBe('What is 2 + 2?');
|
expect(question.question).toBe('What is 2 + 2?');
|
||||||
expect(question.correctAnswer).toBe('4');
|
expect(question.correctAnswer).toBe('4');
|
||||||
expect(question.incorrectAnswers).toHaveLength(3);
|
expect(question.incorrectAnswers).toHaveLength(3);
|
||||||
expect(question.type).toBe('multiple');
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("canPlayTrivia", () => {
|
describe("canPlayTrivia", () => {
|
||||||
it("should allow playing when no cooldown exists", async () => {
|
it("should allow playing when no cooldown exists", async () => {
|
||||||
|
mockFindFirst.mockResolvedValue(undefined);
|
||||||
const result = await triviaService.canPlayTrivia(TEST_USER_ID);
|
const result = await triviaService.canPlayTrivia(TEST_USER_ID);
|
||||||
|
|
||||||
expect(result.canPlay).toBe(true);
|
expect(result.canPlay).toBe(true);
|
||||||
expect(result.nextAvailable).toBeUndefined();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should prevent playing when on cooldown", async () => {
|
it("should prevent playing when on cooldown", async () => {
|
||||||
const futureDate = new Date(Date.now() + 60000);
|
const future = new Date(Date.now() + 60000);
|
||||||
|
mockFindFirst.mockResolvedValue({ expiresAt: future });
|
||||||
await DrizzleClient.insert(userTimers).values({
|
|
||||||
userId: BigInt(TEST_USER_ID),
|
|
||||||
type: TimerType.TRIVIA_COOLDOWN,
|
|
||||||
key: 'default',
|
|
||||||
expiresAt: futureDate,
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await triviaService.canPlayTrivia(TEST_USER_ID);
|
const result = await triviaService.canPlayTrivia(TEST_USER_ID);
|
||||||
|
|
||||||
expect(result.canPlay).toBe(false);
|
expect(result.canPlay).toBe(false);
|
||||||
expect(result.nextAvailable).toBeDefined();
|
expect(result.nextAvailable).toBe(future);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should allow playing when cooldown has expired", async () => {
|
it("should allow playing when cooldown has expired", async () => {
|
||||||
const pastDate = new Date(Date.now() - 1000);
|
const past = new Date(Date.now() - 1000);
|
||||||
|
mockFindFirst.mockResolvedValue({ expiresAt: past });
|
||||||
await DrizzleClient.insert(userTimers).values({
|
|
||||||
userId: BigInt(TEST_USER_ID),
|
|
||||||
type: TimerType.TRIVIA_COOLDOWN,
|
|
||||||
key: 'default',
|
|
||||||
expiresAt: pastDate,
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await triviaService.canPlayTrivia(TEST_USER_ID);
|
const result = await triviaService.canPlayTrivia(TEST_USER_ID);
|
||||||
|
|
||||||
expect(result.canPlay).toBe(true);
|
expect(result.canPlay).toBe(true);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("startTrivia", () => {
|
describe("startTrivia", () => {
|
||||||
it("should start a trivia session and deduct entry fee", async () => {
|
it("should start a trivia session and deduct entry fee", async () => {
|
||||||
|
// Mock cooldown check (first call) and balance check (second call)
|
||||||
|
mockFindFirst
|
||||||
|
.mockResolvedValueOnce(undefined) // No cooldown
|
||||||
|
.mockResolvedValueOnce({ id: 1n, balance: 1000n }); // User balance
|
||||||
|
|
||||||
const session = await triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME);
|
const session = await triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME);
|
||||||
|
|
||||||
expect(session).toBeDefined();
|
expect(session).toBeDefined();
|
||||||
expect(session.sessionId).toContain(TEST_USER_ID);
|
|
||||||
expect(session.userId).toBe(TEST_USER_ID);
|
expect(session.userId).toBe(TEST_USER_ID);
|
||||||
expect(session.question).toBeDefined();
|
expect(session.entryFee).toBe(50n);
|
||||||
expect(session.allAnswers).toHaveLength(4);
|
|
||||||
expect(session.entryFee).toBe(config.trivia.entryFee);
|
|
||||||
expect(session.potentialReward).toBeGreaterThan(0n);
|
|
||||||
|
|
||||||
// Verify balance deduction
|
// Check deduction
|
||||||
const user = await DrizzleClient.query.users.findFirst({
|
expect(mockUpdate).toHaveBeenCalledWith(users);
|
||||||
where: eq(users.id, BigInt(TEST_USER_ID))
|
expect(mockSet).toHaveBeenCalledWith(expect.objectContaining({
|
||||||
});
|
// sql templating makes exact match hard, checking general invocation
|
||||||
|
}));
|
||||||
|
|
||||||
expect(user?.balance).toBe(1000n - config.trivia.entryFee);
|
// Check transactions
|
||||||
|
expect(mockInsert).toHaveBeenCalledWith(transactions);
|
||||||
|
|
||||||
// Verify cooldown was set
|
// Check cooldown set
|
||||||
const cooldown = await DrizzleClient.query.userTimers.findFirst({
|
expect(mockInsert).toHaveBeenCalledWith(userTimers);
|
||||||
where: and(
|
expect(mockOnConflictDoUpdate).toHaveBeenCalled();
|
||||||
eq(userTimers.userId, BigInt(TEST_USER_ID)),
|
|
||||||
eq(userTimers.type, TimerType.TRIVIA_COOLDOWN),
|
|
||||||
eq(userTimers.key, 'default')
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(cooldown).toBeDefined();
|
// Check dashboard event
|
||||||
|
expect(mockRecordEvent).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if user has insufficient balance", async () => {
|
it("should throw error if user has insufficient balance", async () => {
|
||||||
// Set balance to less than entry fee
|
mockFindFirst
|
||||||
await DrizzleClient.update(users)
|
.mockResolvedValueOnce(undefined) // No cooldown
|
||||||
.set({ balance: 10n })
|
.mockResolvedValueOnce({ id: 1n, balance: 10n }); // Insufficient balance
|
||||||
.where(eq(users.id, BigInt(TEST_USER_ID)));
|
|
||||||
|
|
||||||
await expect(triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME))
|
expect(triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME))
|
||||||
.rejects.toThrow('Insufficient funds');
|
.rejects.toThrow("Insufficient funds");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if user is on cooldown", async () => {
|
it("should throw error if user is on cooldown", async () => {
|
||||||
const futureDate = new Date(Date.now() + 60000);
|
mockFindFirst.mockResolvedValueOnce({ expiresAt: new Date(Date.now() + 60000) });
|
||||||
|
|
||||||
await DrizzleClient.insert(userTimers).values({
|
expect(triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME))
|
||||||
userId: BigInt(TEST_USER_ID),
|
.rejects.toThrow("cooldown");
|
||||||
type: TimerType.TRIVIA_COOLDOWN,
|
|
||||||
key: 'default',
|
|
||||||
expiresAt: futureDate,
|
|
||||||
});
|
|
||||||
|
|
||||||
await expect(triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME))
|
|
||||||
.rejects.toThrow('cooldown');
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("submitAnswer", () => {
|
describe("submitAnswer", () => {
|
||||||
it("should award prize for correct answer", async () => {
|
it("should award prize for correct answer", async () => {
|
||||||
const session = await triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME);
|
// Setup an active session manually
|
||||||
const balanceBefore = (await DrizzleClient.query.users.findFirst({
|
const session = {
|
||||||
where: eq(users.id, BigInt(TEST_USER_ID))
|
sessionId: "test_session",
|
||||||
}))!.balance!;
|
userId: TEST_USER_ID,
|
||||||
|
question: { correctAnswer: "4" },
|
||||||
|
potentialReward: 100n
|
||||||
|
};
|
||||||
|
(triviaService as any).activeSessions.set("test_session", session);
|
||||||
|
|
||||||
const result = await triviaService.submitAnswer(session.sessionId, TEST_USER_ID, true);
|
// Mock user balance fetch for reward update
|
||||||
|
mockFindFirst.mockResolvedValue({ id: 1n, balance: 950n });
|
||||||
|
|
||||||
|
const result = await triviaService.submitAnswer("test_session", TEST_USER_ID, true);
|
||||||
|
|
||||||
expect(result.correct).toBe(true);
|
expect(result.correct).toBe(true);
|
||||||
expect(result.reward).toBe(session.potentialReward);
|
expect(result.reward).toBe(100n);
|
||||||
expect(result.correctAnswer).toBe(session.question.correctAnswer);
|
|
||||||
|
|
||||||
// Verify balance increase
|
// Verify balance update
|
||||||
const user = await DrizzleClient.query.users.findFirst({
|
expect(mockUpdate).toHaveBeenCalledWith(users);
|
||||||
where: eq(users.id, BigInt(TEST_USER_ID))
|
expect(mockInsert).toHaveBeenCalledWith(transactions);
|
||||||
});
|
expect(mockRecordEvent).toHaveBeenCalled();
|
||||||
|
|
||||||
expect(user?.balance).toBe(balanceBefore + session.potentialReward);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should not award prize for incorrect answer", async () => {
|
it("should not award prize for incorrect answer", async () => {
|
||||||
const session = await triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME);
|
const session = {
|
||||||
const balanceBefore = (await DrizzleClient.query.users.findFirst({
|
sessionId: "test_session",
|
||||||
where: eq(users.id, BigInt(TEST_USER_ID))
|
userId: TEST_USER_ID,
|
||||||
}))!.balance!;
|
question: { correctAnswer: "4" },
|
||||||
|
potentialReward: 100n
|
||||||
|
};
|
||||||
|
(triviaService as any).activeSessions.set("test_session", session);
|
||||||
|
|
||||||
const result = await triviaService.submitAnswer(session.sessionId, TEST_USER_ID, false);
|
const result = await triviaService.submitAnswer("test_session", TEST_USER_ID, false);
|
||||||
|
|
||||||
expect(result.correct).toBe(false);
|
expect(result.correct).toBe(false);
|
||||||
expect(result.reward).toBe(0n);
|
expect(result.reward).toBe(0n);
|
||||||
expect(result.correctAnswer).toBe(session.question.correctAnswer);
|
|
||||||
|
|
||||||
// Verify balance unchanged (already deducted at start)
|
// No balance update
|
||||||
const user = await DrizzleClient.query.users.findFirst({
|
expect(mockUpdate).not.toHaveBeenCalled();
|
||||||
where: eq(users.id, BigInt(TEST_USER_ID))
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(user?.balance).toBe(balanceBefore);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if session doesn't exist", async () => {
|
it("should throw error if session doesn't exist", async () => {
|
||||||
await expect(triviaService.submitAnswer("invalid_session", TEST_USER_ID, true))
|
expect(triviaService.submitAnswer("invalid", TEST_USER_ID, true))
|
||||||
.rejects.toThrow('Session not found');
|
.rejects.toThrow("Session not found");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should prevent double submission", async () => {
|
it("should prevent double submission", async () => {
|
||||||
const session = await triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME);
|
const session = {
|
||||||
|
sessionId: "test_session",
|
||||||
|
userId: TEST_USER_ID,
|
||||||
|
question: { correctAnswer: "4" },
|
||||||
|
potentialReward: 100n
|
||||||
|
};
|
||||||
|
(triviaService as any).activeSessions.set("test_session", session);
|
||||||
|
|
||||||
await triviaService.submitAnswer(session.sessionId, TEST_USER_ID, true);
|
// Mock user for first success
|
||||||
|
mockFindFirst.mockResolvedValue({ id: 1n, balance: 950n });
|
||||||
|
|
||||||
// Try to submit again
|
await triviaService.submitAnswer("test_session", TEST_USER_ID, true);
|
||||||
await expect(triviaService.submitAnswer(session.sessionId, TEST_USER_ID, true))
|
|
||||||
.rejects.toThrow('Session not found');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("getSession", () => {
|
// Second try
|
||||||
it("should retrieve active session", async () => {
|
expect(triviaService.submitAnswer("test_session", TEST_USER_ID, true))
|
||||||
const session = await triviaService.startTrivia(TEST_USER_ID, TEST_USERNAME);
|
.rejects.toThrow("Session not found");
|
||||||
const retrieved = triviaService.getSession(session.sessionId);
|
|
||||||
|
|
||||||
expect(retrieved).toBeDefined();
|
|
||||||
expect(retrieved?.sessionId).toBe(session.sessionId);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should return undefined for non-existent session", () => {
|
|
||||||
const retrieved = triviaService.getSession("invalid_session");
|
|
||||||
|
|
||||||
expect(retrieved).toBeUndefined();
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
56
shared/scripts/db-backup.sh
Executable file
56
shared/scripts/db-backup.sh
Executable file
@@ -0,0 +1,56 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# =============================================================================
|
||||||
|
# Aurora Database Backup Script
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Load environment variables
|
||||||
|
if [ -f .env ]; then
|
||||||
|
set -a
|
||||||
|
source .env
|
||||||
|
set +a
|
||||||
|
fi
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||||
|
BACKUP_DIR="$PROJECT_DIR/shared/db/backups"
|
||||||
|
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||||
|
BACKUP_FILE="$BACKUP_DIR/backup_$TIMESTAMP.sql"
|
||||||
|
|
||||||
|
# Colors
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
RED='\033[0;31m'
|
||||||
|
NC='\033[0m'
|
||||||
|
|
||||||
|
echo -e "${YELLOW}💾 Starting database backup...${NC}"
|
||||||
|
|
||||||
|
mkdir -p "$BACKUP_DIR"
|
||||||
|
|
||||||
|
if docker ps | grep -q aurora_db; then
|
||||||
|
# Try to dump the database
|
||||||
|
if docker exec aurora_db pg_dump -U "${DB_USER:-auroradev}" "${DB_NAME:-auroradev}" > "$BACKUP_FILE"; then
|
||||||
|
# Check if backup file is not empty
|
||||||
|
if [ -s "$BACKUP_FILE" ]; then
|
||||||
|
echo -e " ${GREEN}✓${NC} Backup successful!"
|
||||||
|
echo -e " 📂 File: $BACKUP_FILE"
|
||||||
|
echo -e " 📏 Size: $(du -h "$BACKUP_FILE" | cut -f1)"
|
||||||
|
|
||||||
|
# Keep only last 10 backups
|
||||||
|
cd "$BACKUP_DIR"
|
||||||
|
ls -t backup_*.sql | tail -n +11 | xargs -r rm --
|
||||||
|
else
|
||||||
|
echo -e " ${RED}✗${NC} Backup created but empty. Something went wrong."
|
||||||
|
rm -f "$BACKUP_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo -e " ${RED}✗${NC} pg_dump failed."
|
||||||
|
rm -f "$BACKUP_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo -e " ${RED}✗${NC} Database container (aurora_db) is not running!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
64
shared/scripts/db-restore.sh
Executable file
64
shared/scripts/db-restore.sh
Executable file
@@ -0,0 +1,64 @@
|
|||||||
|
#!/bin/bash
# =============================================================================
# Aurora Database Restore Script
# =============================================================================
# Usage: ./db-restore.sh [path/to/backup.sql]
# Pipes a SQL dump into the running aurora_db container after confirmation.
# =============================================================================

set -e

# Load environment variables (optional DB_USER / DB_NAME overrides)
if [ -f .env ]; then
    set -a
    source .env
    set +a
fi

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

if [ -z "$1" ]; then
    echo -e "${RED}Error: Please specify the backup file to restore.${NC}"
    echo "Usage: ./db-restore.sh <path-to-sql-file>"
    echo "Available backups:"
    ls -lh shared/db/backups/*.sql 2>/dev/null || echo " (No backups found in shared/db/backups)"
    exit 1
fi

BACKUP_FILE="$1"

if [ ! -f "$BACKUP_FILE" ]; then
    echo -e "${RED}Error: File not found: $BACKUP_FILE${NC}"
    exit 1
fi

echo -e "${YELLOW}⚠️ WARNING: This will OVERWRITE the current database!${NC}"
echo -e "Target Database: ${DB_NAME:-auroradev}"
echo -e "Backup File: $BACKUP_FILE"
echo ""
read -p "Are you sure you want to proceed? (y/N): " -n 1 -r
echo ""

if [[ $REPLY =~ ^[Yy]$ ]]; then
    echo -e "${YELLOW}♻️ Restoring database...${NC}"

    # Anchored name match so similarly named containers don't false-positive.
    if docker ps --format '{{.Names}}' | grep -q '^aurora_db$'; then
        # Drop and recreate public schema to ensure clean slate, then restore
        # Note: dependent on how the dump was created. Standard pg_dump usually
        # includes CREATE commands if configured, but often it's data only or
        # structure+data. For safety, we just feed the file to psql
        # (input redirection instead of a useless `cat | ...`).
        docker exec -i aurora_db psql -U "${DB_USER:-auroradev}" -d "${DB_NAME:-auroradev}" < "$BACKUP_FILE"

        echo -e " ${GREEN}✓${NC} Restore complete!"
    else
        echo -e "${RED}Error: Database container (aurora_db) is not running!${NC}"
        exit 1
    fi
else
    echo "Operation cancelled."
    exit 0
fi
|
||||||
16
shared/scripts/debug-db.ts
Normal file
16
shared/scripts/debug-db.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
|
||||||
|
import postgres from "postgres";
|
||||||
|
|
||||||
|
const connectionString = "postgresql://auroradev:auroradev123@127.0.0.1:5432/aurora_test";
|
||||||
|
console.log("Connecting to:", connectionString);
|
||||||
|
|
||||||
|
const sql = postgres(connectionString);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await sql`SELECT 1 as val`;
|
||||||
|
console.log("Success:", result);
|
||||||
|
await sql.end();
|
||||||
|
} catch (e) {
|
||||||
|
console.error("Connection failed:", e);
|
||||||
|
await sql.end();
|
||||||
|
}
|
||||||
131
shared/scripts/deploy.sh
Normal file
131
shared/scripts/deploy.sh
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
#!/bin/bash
# =============================================================================
# Aurora Production Deployment Script
# =============================================================================
# Run this script to deploy the latest version of Aurora
# Usage: bash deploy.sh
# Steps: pre-flight checks -> DB backup -> git pull -> build/up -> health check
# =============================================================================

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"

echo -e "${GREEN}╔══════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ Aurora Deployment Script ║${NC}"
echo -e "${GREEN}╚══════════════════════════════════════════╝${NC}"
echo ""

cd "$PROJECT_DIR"

# =============================================================================
# Pre-flight Checks
# =============================================================================
echo -e "${YELLOW}[1/5] Running pre-flight checks...${NC}"

# Check if .env exists
if [ ! -f .env ]; then
    echo -e "${RED}Error: .env file not found${NC}"
    exit 1
fi

# Load it so DB_USER / DB_NAME are available for the backup step below.
# (Previously the file was only checked for existence, never sourced, so
# pg_dump silently fell back to its built-in defaults.)
set -a
source .env
set +a

# Check if Docker is running
if ! docker info &>/dev/null; then
    echo -e "${RED}Error: Docker is not running${NC}"
    exit 1
fi

echo -e " ${GREEN}✓${NC} Pre-flight checks passed"

# =============================================================================
# Backup Database (optional but recommended)
# =============================================================================
echo -e "${YELLOW}[2/5] Creating database backup...${NC}"

BACKUP_DIR="$PROJECT_DIR/shared/db/backups"
mkdir -p "$BACKUP_DIR"

if docker ps | grep -q aurora_db; then
    BACKUP_FILE="$BACKUP_DIR/backup_$(date +%Y%m%d_%H%M%S).sql"
    # Best-effort: `|| true` keeps deployment going even if the dump fails.
    docker exec aurora_db pg_dump -U "${DB_USER:-auroradev}" "${DB_NAME:-auroradev}" > "$BACKUP_FILE" 2>/dev/null || true
    if [ -f "$BACKUP_FILE" ] && [ -s "$BACKUP_FILE" ]; then
        echo -e " ${GREEN}✓${NC} Database backed up to: $BACKUP_FILE"
    else
        echo -e " ${YELLOW}⚠${NC} Database backup skipped (container not running or empty)"
        rm -f "$BACKUP_FILE"
    fi
else
    echo -e " ${YELLOW}⚠${NC} Database backup skipped (container not running)"
fi

# =============================================================================
# Pull Latest Code (if using git)
# =============================================================================
echo -e "${YELLOW}[3/5] Pulling latest code...${NC}"

if [ -d .git ]; then
    # Try main first, then master; never abort the deploy on pull failure.
    git pull origin main 2>/dev/null || git pull origin master 2>/dev/null || echo " Skipping git pull"
    echo -e " ${GREEN}✓${NC} Code updated"
else
    echo -e " ${YELLOW}⚠${NC} Not a git repository, skipping pull"
fi

# =============================================================================
# Build and Deploy
# =============================================================================
echo -e "${YELLOW}[4/5] Building and deploying containers...${NC}"

# Build the new image
docker compose -f docker-compose.prod.yml build --no-cache

# Stop and remove old containers, start new ones
docker compose -f docker-compose.prod.yml down
docker compose -f docker-compose.prod.yml up -d

echo -e " ${GREEN}✓${NC} Containers deployed"

# =============================================================================
# Health Check
# =============================================================================
echo -e "${YELLOW}[5/5] Waiting for health checks...${NC}"

sleep 10

# Check container status (healthchecks may need up to ~60s to flip)
if docker ps | grep -q "aurora_app.*healthy"; then
    echo -e " ${GREEN}✓${NC} aurora_app is healthy"
else
    echo -e " ${YELLOW}⚠${NC} aurora_app health check pending (may take up to 60s)"
fi

if docker ps | grep -q "aurora_db.*healthy"; then
    echo -e " ${GREEN}✓${NC} aurora_db is healthy"
else
    echo -e " ${YELLOW}⚠${NC} aurora_db health check pending"
fi

# =============================================================================
# Cleanup
# =============================================================================
echo ""
echo -e "${YELLOW}Cleaning up old Docker images...${NC}"
docker image prune -f

# =============================================================================
# Summary
# =============================================================================
echo ""
echo -e "${GREEN}╔══════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ Deployment Complete! 🚀 ║${NC}"
echo -e "${GREEN}╚══════════════════════════════════════════╝${NC}"
echo ""
echo -e "Container Status:"
docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" | grep aurora
echo ""
echo -e "View logs with: ${YELLOW}docker logs -f aurora_app${NC}"
|
||||||
98
shared/scripts/docker-cleanup.sh
Executable file
98
shared/scripts/docker-cleanup.sh
Executable file
@@ -0,0 +1,98 @@
|
|||||||
|
#!/bin/bash
# Cleanup script for Docker resources
# Use: ./shared/scripts/docker-cleanup.sh
# Use: ./shared/scripts/docker-cleanup.sh --full (for aggressive cleanup)
#
# Without --full the script runs interactively, asking before each
# progressively more destructive prune step.

set -e

echo "🧹 Aurora Docker Cleanup"
echo "========================"
echo ""

# Show current disk usage first (so the "after" report below is comparable)
echo "📊 Current Docker disk usage:"
docker system df
echo ""

# Stop running containers for this project; `|| true` keeps going when the
# stack is already down or no compose file is found.
echo "📦 Stopping Aurora containers..."
docker compose down 2>/dev/null || true

# Remove dangling images (untagged images from failed builds)
echo ""
echo "🗑️ Removing dangling images..."
docker image prune -f

# Check for --full flag for aggressive cleanup
if [[ "$1" == "--full" ]]; then
    echo ""
    echo "🔥 Full cleanup mode - removing all unused Docker resources..."

    # Remove all unused images, not just dangling ones
    echo " → Removing unused images..."
    docker image prune -a -f

    # Remove build cache
    echo " → Removing build cache..."
    docker builder prune -a -f

    # Remove unused volumes (except named ones we need)
    echo " → Removing unused volumes..."
    docker volume prune -f

    # Remove unused networks
    echo " → Removing unused networks..."
    docker network prune -f

    # Remove node_modules volumes (forces a fresh dependency install on next
    # `docker compose up`); ignore errors if the volumes don't exist.
    echo " → Removing node_modules volumes..."
    docker volume rm aurora_app_node_modules aurora_web_node_modules 2>/dev/null || true

    echo ""
    echo "✅ Full cleanup complete!"
else
    # Interactive mode — each step is opt-in via a one-key y/N prompt.
    echo ""
    read -p "🔧 Remove Docker build cache? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        docker builder prune -f
        echo "✓ Build cache cleared"
    fi

    echo ""
    read -p "🖼️ Remove ALL unused images (not just dangling)? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        docker image prune -a -f
        echo "✓ Unused images removed"
    fi

    echo ""
    read -p "📁 Remove node_modules volumes? (forces fresh install) (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        docker volume rm aurora_app_node_modules aurora_web_node_modules 2>/dev/null || true
        echo "✓ Node modules volumes removed"
    fi

    echo ""
    read -p "🧨 Run full system prune (removes ALL unused data)? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        # Most destructive option: also removes anonymous volumes.
        docker system prune -a -f --volumes
        echo "✓ Full system prune complete"
    fi

    echo ""
    echo "✅ Cleanup complete!"
fi

echo ""
echo "📊 Docker disk usage after cleanup:"
docker system df
echo ""
echo "💡 Tip: Check container logs with: sudo du -sh /var/lib/docker/containers/*/*.log"
echo "💡 Tip: Truncate logs with: sudo truncate -s 0 /var/lib/docker/containers/*/*.log"
echo ""
echo "Run 'docker compose up --build' to rebuild"
|
||||||
38
shared/scripts/logs.sh
Executable file
38
shared/scripts/logs.sh
Executable file
@@ -0,0 +1,38 @@
|
|||||||
|
#!/bin/bash
# =============================================================================
# Aurora Log Viewer
# =============================================================================
# Usage: ./logs.sh [app|db|all] [-f]
# Default: app container, follow mode
# =============================================================================

# Default: tail the app service and follow output.
SERVICE="${1:-app}"
FOLLOW="-f"

# "-f" given as the FIRST argument means: follow the default (app) service.
# "--no-follow" as the second argument disables follow mode; an explicit
# "-f" in second position is simply the default spelled out.
if [[ "$1" == "-f" ]]; then
    SERVICE="app"
elif [[ "$2" == "--no-follow" ]]; then
    FOLLOW=""
fi

echo "📋 Fetching logs for service: $SERVICE..."

# $FOLLOW is intentionally unquoted so it vanishes entirely when empty.
case "$SERVICE" in
    app|db)
        docker compose logs $FOLLOW "$SERVICE"
        ;;
    all)
        docker compose logs $FOLLOW
        ;;
    *)
        echo "Unknown service: $SERVICE"
        echo "Usage: ./logs.sh [app|db|all] [-f]"
        exit 1
        ;;
esac
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
#!/bin/bash
# Opens an SSH tunnel from localhost to the Aurora dashboard running on the
# VPS, then launches the user's browser at the forwarded port.

# Load environment variables
if [ -f .env ]; then
set -a
source .env
set +a
fi

# Both connection settings must come from .env; bail out with guidance otherwise.
if [ -z "$VPS_HOST" ] || [ -z "$VPS_USER" ]; then
echo "Error: VPS_HOST and VPS_USER must be set in .env"
echo "Please add them to your .env file:"
echo "VPS_USER=your-username"
echo "VPS_HOST=your-ip-address"
exit 1
fi

# Local (and remote) dashboard port; 3000 unless overridden in .env.
DASHBOARD_PORT=${DASHBOARD_PORT:-3000}

echo "🌐 Establishing secure tunnel to Aurora Dashboard..."
echo "📊 Dashboard will be accessible at: http://localhost:$DASHBOARD_PORT"
echo "Press Ctrl+C to stop the connection."
echo ""

# Function to open browser (cross-platform)
# `open` exists on macOS, `xdg-open` on Linux; silently does nothing elsewhere.
open_browser() {
sleep 2
if command -v open &> /dev/null; then
open "http://localhost:$DASHBOARD_PORT"
elif command -v xdg-open &> /dev/null; then
xdg-open "http://localhost:$DASHBOARD_PORT"
fi
}

# Check if autossh is available
# autossh auto-reconnects dropped tunnels; plain ssh is the fallback.
if command -v autossh &> /dev/null; then
SSH_CMD="autossh -M 0 -o ServerAliveInterval=30 -o ServerAliveCountMax=3"
else
SSH_CMD="ssh -o ServerAliveInterval=30 -o ServerAliveCountMax=3"
fi

# Launch the browser in the background, then hold the tunnel in the
# foreground (-N: no remote command, -L: local port forward).
open_browser &
$SSH_CMD -N -L $DASHBOARD_PORT:127.0.0.1:$DASHBOARD_PORT $VPS_USER@$VPS_HOST
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
#!/bin/bash
# Opens an SSH tunnel to Drizzle Studio (port 4983) on the VPS so the
# hosted local.drizzle.studio UI can reach it via localhost.

# Load environment variables
if [ -f .env ]; then
# export $(grep -v '^#' .env | xargs) # Use a safer way if possible, but for simple .env this often works.
# Better way to source .env without exporting everything to shell if we just want to use them in script:
set -a
source .env
set +a
fi

# Both connection settings must come from .env; bail out with guidance otherwise.
if [ -z "$VPS_HOST" ] || [ -z "$VPS_USER" ]; then
echo "Error: VPS_HOST and VPS_USER must be set in .env"
echo "Please add them to your .env file:"
echo "VPS_USER=your-username"
echo "VPS_HOST=your-ip-address"
exit 1
fi

echo "🔮 Establishing secure tunnel to Drizzle Studio..."
echo ""
echo "📚 Open this URL in your browser:"
echo "   https://local.drizzle.studio?host=127.0.0.1&port=4983"
echo ""
echo "💡 Note: Drizzle Studio works via their proxy service, not direct localhost."
echo "Press Ctrl+C to stop the connection."

# -N means "Do not execute a remote command". -L is for local port forwarding.
ssh -N -L 4983:127.0.0.1:4983 $VPS_USER@$VPS_HOST
|
|
||||||
160
shared/scripts/setup-server.sh
Normal file
160
shared/scripts/setup-server.sh
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
#!/bin/bash
# =============================================================================
# Server Setup Script for Aurora Production Deployment
# =============================================================================
# Run this script ONCE on a fresh server to configure security settings.
# Usage: sudo bash setup-server.sh
# Steps: deploy user -> UFW firewall -> fail2ban -> SSH hardening -> updates
# =============================================================================

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

echo -e "${GREEN}╔══════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ Aurora Server Security Setup Script ║${NC}"
echo -e "${GREEN}╚══════════════════════════════════════════╝${NC}"
echo ""

# Check if running as root
if [ "$EUID" -ne 0 ]; then
    echo -e "${RED}Error: Please run as root (sudo)${NC}"
    exit 1
fi

# =============================================================================
# 1. Create Deploy User
# =============================================================================
echo -e "${YELLOW}[1/5] Creating deploy user...${NC}"

DEPLOY_USER="deploy"

if id "$DEPLOY_USER" &>/dev/null; then
    echo -e " User '$DEPLOY_USER' already exists, skipping..."
else
    adduser --disabled-password --gecos "" "$DEPLOY_USER"
    echo -e " ${GREEN}✓${NC} Created user '$DEPLOY_USER'"
fi

# Add to docker group.
# Fixed: the old `usermod ... || groupadd docker && usermod ...` parsed as
# `(usermod || groupadd) && usermod` due to shell precedence, re-running
# usermod on success and aborting under `set -e` when the group already
# existed. `groupadd -f` succeeds whether or not the group exists.
groupadd -f docker
usermod -aG docker "$DEPLOY_USER"
echo -e " ${GREEN}✓${NC} Added '$DEPLOY_USER' to docker group"

# Add to sudo group (optional - remove if you don't want sudo access)
usermod -aG sudo "$DEPLOY_USER"
echo -e " ${GREEN}✓${NC} Added '$DEPLOY_USER' to sudo group"

# Copy SSH keys from root to deploy user so key-based login keeps working
# after password auth is disabled below.
if [ -d /root/.ssh ]; then
    mkdir -p "/home/$DEPLOY_USER/.ssh"
    cp /root/.ssh/authorized_keys "/home/$DEPLOY_USER/.ssh/" 2>/dev/null || true
    chown -R "$DEPLOY_USER:$DEPLOY_USER" "/home/$DEPLOY_USER/.ssh"
    chmod 700 "/home/$DEPLOY_USER/.ssh"
    chmod 600 "/home/$DEPLOY_USER/.ssh/authorized_keys" 2>/dev/null || true
    echo -e " ${GREEN}✓${NC} Copied SSH keys to '$DEPLOY_USER'"
fi

# =============================================================================
# 2. Configure UFW Firewall
# =============================================================================
echo -e "${YELLOW}[2/5] Configuring UFW firewall...${NC}"

apt-get update -qq
apt-get install -y -qq ufw

ufw default deny incoming
ufw default allow outgoing
ufw allow ssh
# Add more rules as needed:
# ufw allow 80/tcp   # HTTP
# ufw allow 443/tcp  # HTTPS

# Enable UFW (non-interactive)
echo "y" | ufw enable
echo -e " ${GREEN}✓${NC} UFW firewall enabled and configured"

# =============================================================================
# 3. Install and Configure Fail2ban
# =============================================================================
echo -e "${YELLOW}[3/5] Installing fail2ban...${NC}"

apt-get install -y -qq fail2ban

# Create local jail configuration (jail.local overrides shipped defaults)
cat > /etc/fail2ban/jail.local << 'EOF'
[DEFAULT]
bantime = 1h
findtime = 10m
maxretry = 5

[sshd]
enabled = true
port = ssh
filter = sshd
logpath = /var/log/auth.log
maxretry = 3
bantime = 24h
EOF

systemctl enable fail2ban
systemctl restart fail2ban
echo -e " ${GREEN}✓${NC} Fail2ban installed and configured"

# =============================================================================
# 4. Harden SSH Configuration
# =============================================================================
echo -e "${YELLOW}[4/5] Hardening SSH configuration...${NC}"

SSHD_CONFIG="/etc/ssh/sshd_config"

# Backup original config
cp "$SSHD_CONFIG" "${SSHD_CONFIG}.backup"

# Apply hardening settings (sed handles both commented and active lines)
sed -i 's/^#\?PermitRootLogin.*/PermitRootLogin no/' "$SSHD_CONFIG"
sed -i 's/^#\?PasswordAuthentication.*/PasswordAuthentication no/' "$SSHD_CONFIG"
sed -i 's/^#\?PubkeyAuthentication.*/PubkeyAuthentication yes/' "$SSHD_CONFIG"
sed -i 's/^#\?X11Forwarding.*/X11Forwarding no/' "$SSHD_CONFIG"
sed -i 's/^#\?MaxAuthTries.*/MaxAuthTries 3/' "$SSHD_CONFIG"

# Validate SSH config before restarting so we never lock ourselves out
if sshd -t; then
    systemctl reload sshd
    echo -e " ${GREEN}✓${NC} SSH hardened (root login disabled, password auth disabled)"
else
    echo -e " ${RED}✗${NC} SSH config validation failed, restoring backup..."
    cp "${SSHD_CONFIG}.backup" "$SSHD_CONFIG"
fi

# =============================================================================
# 5. System Updates
# =============================================================================
echo -e "${YELLOW}[5/5] Installing system updates...${NC}"

apt-get upgrade -y -qq
apt-get autoremove -y -qq
echo -e " ${GREEN}✓${NC} System updated"

# =============================================================================
# Summary
# =============================================================================
echo ""
echo -e "${GREEN}╔══════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ Setup Complete! ║${NC}"
echo -e "${GREEN}╚══════════════════════════════════════════╝${NC}"
echo ""
echo -e "Next steps:"
echo -e " 1. Update your local .env file:"
echo -e " ${YELLOW}VPS_USER=deploy${NC}"
echo -e ""
echo -e " 2. Test SSH access with the new user:"
echo -e " ${YELLOW}ssh deploy@<your-server-ip>${NC}"
echo -e ""
echo -e " 3. Deploy the application:"
echo -e " ${YELLOW}cd /home/deploy/Aurora && docker compose -f docker-compose.prod.yml up -d${NC}"
echo ""
echo -e "${RED}⚠️ IMPORTANT: Test SSH access with 'deploy' user BEFORE logging out!${NC}"
echo -e "${RED} Keep this root session open until you confirm 'deploy' user works.${NC}"
|
||||||
108
shared/scripts/simulate-ci.sh
Executable file
108
shared/scripts/simulate-ci.sh
Executable file
@@ -0,0 +1,108 @@
|
|||||||
|
#!/bin/bash
# Simulates the CI pipeline locally: throwaway Postgres container, config
# file matching deploy.yml, schema push, then the test suite (or a single
# test file passed as $1).
set -e

DB_CONTAINER_NAME="aurora_ci_test_db"
DB_PORT="5433"
DB_USER="postgres"
DB_PASS="postgres"
DB_NAME="aurora_test"

echo "🚀 Starting CI Simulation..."

# Always remove the throwaway container, even when `set -e` aborts mid-run
# (previously a failed db:push leaked the container).
cleanup() {
    docker rm -f "$DB_CONTAINER_NAME" >/dev/null 2>&1 || true
}
trap cleanup EXIT

# Cleanup previous run if exists
if docker ps -a --format '{{.Names}}' | grep -q "^${DB_CONTAINER_NAME}$"; then
    echo "🧹 Cleaning up old container..."
    docker rm -f "$DB_CONTAINER_NAME"
fi

# 1. Start Postgres Service
echo "🐳 Starting temporary PostgreSQL container on port $DB_PORT..."
docker run -d \
    --name "$DB_CONTAINER_NAME" \
    -e POSTGRES_USER=$DB_USER \
    -e POSTGRES_PASSWORD=$DB_PASS \
    -e POSTGRES_DB=$DB_NAME \
    -p $DB_PORT:5432 \
    postgres:17-alpine

echo "⏳ Waiting for database to be ready..."
# Poll pg_isready for up to 30s; abort instead of silently proceeding
# against a dead database (the old loop fell through after 30 tries).
READY=0
for i in {1..30}; do
    if docker exec "$DB_CONTAINER_NAME" pg_isready -U $DB_USER > /dev/null 2>&1; then
        echo "✅ Database is ready!"
        READY=1
        break
    fi
    echo " ...waiting ($i/30)"
    sleep 1
done
if [ "$READY" -ne 1 ]; then
    echo "❌ Database did not become ready within 30s."
    exit 1
fi

# Define connection string
export DATABASE_URL="postgresql://$DB_USER:$DB_PASS@127.0.0.1:$DB_PORT/$DB_NAME"

# 2. Create Config File (Match deploy.yml)
echo "📝 Creating shared/config/config.json..."
mkdir -p shared/config
cat <<EOF > shared/config/config.json
{
  "leveling": { "base": 100, "exponent": 2.5, "chat": { "cooldownMs": 60000, "minXp": 15, "maxXp": 25 } },
  "economy": {
    "daily": { "amount": "100", "streakBonus": "10", "weeklyBonus": "50", "cooldownMs": 86400000 },
    "transfers": { "allowSelfTransfer": false, "minAmount": "1" },
    "exam": { "multMin": 0.05, "multMax": 0.03 }
  },
  "inventory": { "maxStackSize": "99", "maxSlots": 50 },
  "commands": {},
  "lootdrop": {
    "activityWindowMs": 120000, "minMessages": 1, "spawnChance": 1, "cooldownMs": 3000,
    "reward": { "min": 40, "max": 150, "currency": "Astral Units" }
  },
  "studentRole": "123", "visitorRole": "456", "colorRoles": [],
  "moderation": {
    "prune": { "maxAmount": 100, "confirmThreshold": 50, "batchSize": 100, "batchDelayMs": 1000 },
    "cases": { "dmOnWarn": false }
  },
  "trivia": {
    "entryFee": "50", "rewardMultiplier": 1.5, "timeoutSeconds": 30, "cooldownMs": 60000,
    "categories": [], "difficulty": "random"
  },
  "system": {}
}
EOF

# 3. Setup Database Schema
echo "📜 Pushing schema to test database..."
bun run db:push:local

# 4. Export Test Env Vars (dummy values; nothing talks to Discord in tests)
export DISCORD_BOT_TOKEN="test_token"
export DISCORD_CLIENT_ID="123456789"
export DISCORD_GUILD_ID="123456789"
export ADMIN_TOKEN="admin_token_123"
export LOG_LEVEL="error"

# 5. Run Tests
echo "🧪 Running Tests..."
if [ -n "$1" ]; then
    echo "Running specific test: $1"
    if bun test "$1"; then
        echo "✅ Specific Test Passed!"
        EXIT_CODE=0
    else
        echo "❌ Specific Test Failed!"
        EXIT_CODE=1
    fi
else
    if bash shared/scripts/test-sequential.sh; then
        echo "✅ CI Simulation Passed!"
        EXIT_CODE=0
    else
        echo "❌ CI Simulation Failed!"
        EXIT_CODE=1
    fi
fi

# 6. Cleanup (also runs via the EXIT trap on early aborts)
echo "🧹 Cleaning up container..."
cleanup

exit $EXIT_CODE
|
||||||
36
shared/scripts/test-sequential.sh
Executable file
36
shared/scripts/test-sequential.sh
Executable file
@@ -0,0 +1,36 @@
|
|||||||
|
#!/bin/bash
# Runs every *.test.ts file one at a time (fail-fast) so tests that share
# the database don't interfere with each other.
set -e

echo "🔍 Finding test files..."
TEST_FILES=$(find . -name "*.test.ts" -not -path "*/node_modules/*")

if [ -z "$TEST_FILES" ]; then
    echo "⚠️ No test files found!"
    exit 0
fi

echo "🧪 Running tests sequentially..."

# Iterate line-by-line so paths containing spaces survive; the old
# `for FILE in $TEST_FILES` word-split them into fragments.
while IFS= read -r FILE; do
    echo "---------------------------------------------------"
    echo "running: $FILE"
    if bun test "$FILE"; then
        echo "✅ passed: $FILE"
    else
        echo "❌ failed: $FILE"
        # Fail fast
        exit 1
    fi
done <<< "$TEST_FILES"

# Reaching this point means no test triggered the fail-fast exit above
# (the old FAILED flag's failure branch was unreachable dead code).
echo "---------------------------------------------------"
echo "✅ All tests passed!"
exit 0
|
||||||
@@ -4,6 +4,7 @@ import "./index.css";
|
|||||||
import { DesignSystem } from "./pages/DesignSystem";
|
import { DesignSystem } from "./pages/DesignSystem";
|
||||||
import { AdminQuests } from "./pages/AdminQuests";
|
import { AdminQuests } from "./pages/AdminQuests";
|
||||||
import { AdminOverview } from "./pages/admin/Overview";
|
import { AdminOverview } from "./pages/admin/Overview";
|
||||||
|
import { AdminItems } from "./pages/admin/Items";
|
||||||
|
|
||||||
import { Home } from "./pages/Home";
|
import { Home } from "./pages/Home";
|
||||||
import { Toaster } from "sonner";
|
import { Toaster } from "sonner";
|
||||||
@@ -28,6 +29,7 @@ export function App() {
|
|||||||
<Route path="/admin" element={<Navigate to="/admin/overview" replace />} />
|
<Route path="/admin" element={<Navigate to="/admin/overview" replace />} />
|
||||||
<Route path="/admin/overview" element={<AdminOverview />} />
|
<Route path="/admin/overview" element={<AdminOverview />} />
|
||||||
<Route path="/admin/quests" element={<AdminQuests />} />
|
<Route path="/admin/quests" element={<AdminQuests />} />
|
||||||
|
<Route path="/admin/items" element={<AdminItems />} />
|
||||||
|
|
||||||
|
|
||||||
<Route path="/settings" element={<SettingsLayout />}>
|
<Route path="/settings" element={<SettingsLayout />}>
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import * as React from "react"
|
import * as React from "react"
|
||||||
import { useLocation, type Location } from "react-router-dom"
|
import { useLocation, type Location } from "react-router-dom"
|
||||||
import { Home, Palette, ShieldCheck, Settings, LayoutDashboard, Trophy, SlidersHorizontal, Coins, Cog, UserCog, type LucideIcon } from "lucide-react"
|
import { Home, Palette, ShieldCheck, Settings, LayoutDashboard, Trophy, SlidersHorizontal, Coins, Cog, UserCog, Package, type LucideIcon } from "lucide-react"
|
||||||
|
|
||||||
export interface NavSubItem {
|
export interface NavSubItem {
|
||||||
title: string
|
title: string
|
||||||
@@ -46,6 +46,7 @@ const NAV_CONFIG: NavConfigItem[] = [
|
|||||||
subItems: [
|
subItems: [
|
||||||
{ title: "Overview", url: "/admin/overview", icon: LayoutDashboard },
|
{ title: "Overview", url: "/admin/overview", icon: LayoutDashboard },
|
||||||
{ title: "Quests", url: "/admin/quests", icon: Trophy },
|
{ title: "Quests", url: "/admin/quests", icon: Trophy },
|
||||||
|
{ title: "Items", url: "/admin/items", icon: Package },
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
20
web/src/pages/admin/Items.tsx
Normal file
20
web/src/pages/admin/Items.tsx
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import React from "react";
|
||||||
|
import { SectionHeader } from "../../components/section-header";
|
||||||
|
|
||||||
|
export function AdminItems() {
|
||||||
|
return (
|
||||||
|
<main className="pt-8 px-8 pb-12 max-w-7xl mx-auto space-y-12">
|
||||||
|
<SectionHeader
|
||||||
|
badge="Item Management"
|
||||||
|
title="Items"
|
||||||
|
description="Create and manage items for the Aurora RPG."
|
||||||
|
/>
|
||||||
|
|
||||||
|
<div className="animate-in fade-in slide-up duration-700">
|
||||||
|
<p className="text-muted-foreground">Items management coming soon...</p>
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default AdminItems;
|
||||||
@@ -58,6 +58,7 @@ mock.module("../../bot/lib/clientStats", () => ({
|
|||||||
|
|
||||||
describe("WebServer Security & Limits", () => {
|
describe("WebServer Security & Limits", () => {
|
||||||
const port = 3001;
|
const port = 3001;
|
||||||
|
const hostname = "127.0.0.1";
|
||||||
let serverInstance: WebServerInstance | null = null;
|
let serverInstance: WebServerInstance | null = null;
|
||||||
|
|
||||||
afterAll(async () => {
|
afterAll(async () => {
|
||||||
@@ -67,8 +68,8 @@ describe("WebServer Security & Limits", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
test("should reject more than 10 concurrent WebSocket connections", async () => {
|
test("should reject more than 10 concurrent WebSocket connections", async () => {
|
||||||
serverInstance = await createWebServer({ port, hostname: "localhost" });
|
serverInstance = await createWebServer({ port, hostname });
|
||||||
const wsUrl = `ws://localhost:${port}/ws`;
|
const wsUrl = `ws://${hostname}:${port}/ws`;
|
||||||
const sockets: WebSocket[] = [];
|
const sockets: WebSocket[] = [];
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -95,9 +96,9 @@ describe("WebServer Security & Limits", () => {
|
|||||||
|
|
||||||
test("should return 200 for health check", async () => {
|
test("should return 200 for health check", async () => {
|
||||||
if (!serverInstance) {
|
if (!serverInstance) {
|
||||||
serverInstance = await createWebServer({ port, hostname: "localhost" });
|
serverInstance = await createWebServer({ port, hostname });
|
||||||
}
|
}
|
||||||
const response = await fetch(`http://localhost:${port}/api/health`);
|
const response = await fetch(`http://${hostname}:${port}/api/health`);
|
||||||
expect(response.status).toBe(200);
|
expect(response.status).toBe(200);
|
||||||
const data = (await response.json()) as { status: string };
|
const data = (await response.json()) as { status: string };
|
||||||
expect(data.status).toBe("ok");
|
expect(data.status).toBe("ok");
|
||||||
@@ -105,7 +106,7 @@ describe("WebServer Security & Limits", () => {
|
|||||||
|
|
||||||
describe("Administrative Actions", () => {
|
describe("Administrative Actions", () => {
|
||||||
test("should allow administrative actions without token", async () => {
|
test("should allow administrative actions without token", async () => {
|
||||||
const response = await fetch(`http://localhost:${port}/api/actions/reload-commands`, {
|
const response = await fetch(`http://${hostname}:${port}/api/actions/reload-commands`, {
|
||||||
method: "POST"
|
method: "POST"
|
||||||
});
|
});
|
||||||
// Should be 200 (OK) or 500 (if underlying service fails, but NOT 401)
|
// Should be 200 (OK) or 500 (if underlying service fails, but NOT 401)
|
||||||
@@ -114,7 +115,7 @@ describe("WebServer Security & Limits", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
test("should reject maintenance mode with invalid payload", async () => {
|
test("should reject maintenance mode with invalid payload", async () => {
|
||||||
const response = await fetch(`http://localhost:${port}/api/actions/maintenance-mode`, {
|
const response = await fetch(`http://${hostname}:${port}/api/actions/maintenance-mode`, {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json"
|
"Content-Type": "application/json"
|
||||||
|
|||||||
Reference in New Issue
Block a user