This commit is contained in:
syntaxbullet
2026-01-30 13:44:04 +01:00
84 changed files with 5995 additions and 2195 deletions

View File

@@ -85,3 +85,9 @@ export enum TriviaCategory {
ANIMALS = 27,
ANIME_MANGA = 31,
}
// Shared bot branding used when building Discord embeds:
// COLOR is the accent color (hex RGB) and FOOTER_TEXT the embed footer label.
export const BRANDING = {
COLOR: 0x00d4ff as const,
FOOTER_TEXT: 'Aurora' as const,
};

View File

@@ -17,5 +17,8 @@ export const EVENTS = {
RELOAD_COMMANDS: "actions:reload_commands",
CLEAR_CACHE: "actions:clear_cache",
MAINTENANCE_MODE: "actions:maintenance_mode",
},
QUEST: {
COMPLETED: "quest:completed",
}
} as const;

118
shared/lib/logger.test.ts Normal file
View File

@@ -0,0 +1,118 @@
import { expect, test, describe, beforeAll, afterAll, spyOn } from "bun:test";
import { logger } from "./logger";
import { existsSync, unlinkSync, readFileSync, writeFileSync } from "fs";
import { join } from "path";
describe("Logger", () => {
  const logDir = join(process.cwd(), "logs");
  const logFile = join(logDir, "error.log");

  beforeAll(() => {
    // Start from a clean slate so file-content assertions are deterministic.
    try {
      if (existsSync(logFile)) unlinkSync(logFile);
    } catch (e) {}
  });

  test("should log info messages to console with correct format", () => {
    const logSpy = spyOn(console, "log");
    logger.info("system", "Formatting test");
    expect(logSpy).toHaveBeenCalled();
    const firstArg = logSpy.mock.calls[0]?.[0];
    expect(firstArg).toBeDefined();
    if (firstArg) {
      // ISO timestamp, upper-cased level and source, then the raw message.
      const expectedLine = /^\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\] \[INFO\] \[SYSTEM\] Formatting test$/;
      expect(firstArg).toMatch(expectedLine);
    }
    logSpy.mockRestore();
  });

  test("should write error logs to file with stack trace", async () => {
    logger.error("system", "Test error message", new Error("Source error"));

    // Poll for the async file write to land rather than sleeping a fixed amount.
    let fileContents = "";
    for (let attempt = 0; attempt < 20; attempt++) {
      if (existsSync(logFile)) {
        fileContents = readFileSync(logFile, "utf-8");
        if (fileContents.includes("Source error")) break;
      }
      await new Promise(resolve => setTimeout(resolve, 50));
    }

    expect(fileContents).toMatch(/^\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\] \[ERROR\] \[SYSTEM\] Test error message: Source error/);
    expect(fileContents).toContain("Stack Trace:");
    expect(fileContents).toContain("Error: Source error");
    expect(fileContents).toContain("logger.test.ts");
  });

  test("should handle log directory creation failures gracefully", async () => {
    const errorSpy = spyOn(console, "error");
    // Place a regular file where the logger expects a directory so mkdir fails.
    const blockerPath = join(process.cwd(), "logs_fail_trigger");
    try {
      writeFileSync(blockerPath, "not a directory");

      // Point the singleton at the bogus location for this test only.
      const savedDir = (logger as any).logDir;
      const savedPath = (logger as any).errorLogPath;
      (logger as any).logDir = blockerPath;
      (logger as any).errorLogPath = join(blockerPath, "error.log");
      (logger as any).initialized = false;

      logger.error("system", "This should fail directory creation");
      // Give the async initialization attempt time to report its failure.
      await new Promise(resolve => setTimeout(resolve, 100));

      expect(errorSpy).toHaveBeenCalled();
      const sawInitFailure = errorSpy.mock.calls.some(call =>
        String(call[0]).includes("Failed to initialize logger directory")
      );
      expect(sawInitFailure).toBe(true);

      // Restore the singleton's original paths and force re-initialization.
      (logger as any).logDir = savedDir;
      (logger as any).errorLogPath = savedPath;
      (logger as any).initialized = false;
    } finally {
      if (existsSync(blockerPath)) unlinkSync(blockerPath);
      errorSpy.mockRestore();
    }
  });

  test("should include complex data objects in logs", () => {
    const logSpy = spyOn(console, "log");
    const payload = { userId: "123", tags: ["test"] };
    logger.info("bot", "Message with data", payload);
    expect(logSpy).toHaveBeenCalled();
    const firstArg = logSpy.mock.calls[0]?.[0];
    expect(firstArg).toBeDefined();
    if (firstArg) {
      // Data payloads are JSON-serialized and appended after the message.
      expect(firstArg).toContain(` | Data: ${JSON.stringify(payload)}`);
    }
    logSpy.mockRestore();
  });

  test("should handle circular references in data objects", () => {
    const logSpy = spyOn(console, "log");
    const payload: any = { name: "circular" };
    payload.self = payload;
    logger.info("bot", "Circular test", payload);
    expect(logSpy).toHaveBeenCalled();
    expect(logSpy.mock.calls[0]?.[0]).toContain("[Circular]");
    logSpy.mockRestore();
  });
});

162
shared/lib/logger.ts Normal file
View File

@@ -0,0 +1,162 @@
import { join, resolve } from "path";
import { appendFile, mkdir, stat } from "fs/promises";
import { existsSync } from "fs";
/** Numeric severity levels, ordered from least to most severe. */
export enum LogLevel {
  DEBUG = 0,
  INFO = 1,
  WARN = 2,
  ERROR = 3,
}

// Human-readable level names used in the formatted log line.
const LogLevelNames = {
  [LogLevel.DEBUG]: "DEBUG",
  [LogLevel.INFO]: "INFO",
  [LogLevel.WARN]: "WARN",
  [LogLevel.ERROR]: "ERROR",
};

/** Subsystem that emitted the log entry. */
export type LogSource = "bot" | "web" | "shared" | "system";

/** Structured representation of a single log record before formatting. */
export interface LogEntry {
  timestamp: string; // ISO-8601 timestamp from new Date().toISOString()
  level: string;     // level name from LogLevelNames
  source: LogSource;
  message: string;
  data?: any;        // optional payload, serialized via safeStringify
  stack?: string;    // stack trace, only populated for Error arguments
}

/**
 * Console logger that additionally persists ERROR-level entries to
 * logs/error.log under the current working directory.
 *
 * The log directory is created lazily and exactly once: the first error
 * write triggers a single mkdir attempt, and concurrent writers share the
 * same in-flight promise. All failure paths are swallowed (reported to the
 * console) so that logging can never crash the host application.
 */
class Logger {
  private logDir: string;
  private errorLogPath: string;
  private initialized: boolean = false;
  private initPromise: Promise<void> | null = null;

  constructor() {
    // Resolve from the working directory so the log location is stable
    // regardless of where this module lives inside the project tree.
    this.logDir = resolve(process.cwd(), "logs");
    this.errorLogPath = join(this.logDir, "error.log");
  }

  /**
   * Ensure the log directory exists. Never throws; failures are reported
   * via console.error and retried on the next write.
   */
  private async ensureInitialized() {
    if (this.initialized) return;
    if (this.initPromise) return this.initPromise;
    this.initPromise = (async () => {
      try {
        await mkdir(this.logDir, { recursive: true });
        this.initialized = true;
      } catch (err: any) {
        // mkdir may race with another process; accept the path if it turns
        // out to already be a real directory.
        if (err.code === "EEXIST" || err.code === "ENOTDIR") {
          try {
            const stats = await stat(this.logDir);
            if (stats.isDirectory()) {
              this.initialized = true;
              return;
            }
          } catch (statErr) {}
        }
        console.error(`[SYSTEM] Failed to initialize logger directory at ${this.logDir}:`, err);
      } finally {
        // Clear the in-flight marker so a failed init can be retried later.
        this.initPromise = null;
      }
    })();
    return this.initPromise;
  }

  /**
   * JSON-serialize arbitrary data without ever throwing.
   * Falls back to a circular-reference-safe replacer, and finally to a
   * placeholder for values JSON cannot represent at all (e.g. BigInt).
   */
  private safeStringify(data: any): string {
    try {
      return JSON.stringify(data);
    } catch {
      try {
        const seen = new WeakSet();
        return JSON.stringify(data, (_key, value) => {
          if (typeof value === "object" && value !== null) {
            if (seen.has(value)) return "[Circular]";
            seen.add(value);
          }
          return value;
        });
      } catch {
        // Still unserializable (BigInt values, throwing getters, ...).
        return "[Unserializable]";
      }
    }
  }

  /** Render an entry as a single line, plus an optional stack trace block. */
  private formatMessage(entry: LogEntry): string {
    // Compare against undefined (not truthiness) so falsy-but-valid
    // payloads such as 0, "" or false are still included in the output.
    const dataStr = entry.data !== undefined ? ` | Data: ${this.safeStringify(entry.data)}` : "";
    const stackStr = entry.stack ? `\nStack Trace:\n${entry.stack}` : "";
    return `[${entry.timestamp}] [${entry.level}] [${entry.source.toUpperCase()}] ${entry.message}${dataStr}${stackStr}`;
  }

  /** Append a formatted line to the error log; failures go to the console. */
  private async writeToErrorLog(formatted: string) {
    await this.ensureInitialized();
    try {
      await appendFile(this.errorLogPath, formatted + "\n");
    } catch (err) {
      console.error("[SYSTEM] Failed to write to error log file:", err);
    }
  }

  /**
   * Core sink shared by all public methods. An Error passed to an ERROR
   * entry has its message and stack folded into the entry; any other
   * defined argument is treated as structured data.
   */
  private log(level: LogLevel, source: LogSource, message: string, errorOrData?: any) {
    const timestamp = new Date().toISOString();
    const levelName = LogLevelNames[level];
    const entry: LogEntry = {
      timestamp,
      level: levelName,
      source,
      message,
    };
    if (level === LogLevel.ERROR && errorOrData instanceof Error) {
      entry.stack = errorOrData.stack;
      entry.message = `${message}: ${errorOrData.message}`;
    } else if (errorOrData !== undefined) {
      entry.data = errorOrData;
    }
    const formatted = this.formatMessage(entry);
    // Route to the console method matching the severity.
    switch (level) {
      case LogLevel.DEBUG:
        console.debug(formatted);
        break;
      case LogLevel.INFO:
        console.log(formatted);
        break;
      case LogLevel.WARN:
        console.warn(formatted);
        break;
      case LogLevel.ERROR:
        console.error(formatted);
        break;
    }
    // Fire-and-forget persistence; never let logging reject upstream.
    if (level === LogLevel.ERROR) {
      this.writeToErrorLog(formatted).catch(() => {
        // Silently fail to avoid infinite loops
      });
    }
  }

  /** Log a DEBUG message with optional structured data. */
  debug(source: LogSource, message: string, data?: any) {
    this.log(LogLevel.DEBUG, source, message, data);
  }

  /** Log an INFO message with optional structured data. */
  info(source: LogSource, message: string, data?: any) {
    this.log(LogLevel.INFO, source, message, data);
  }

  /** Log a WARN message with optional structured data. */
  warn(source: LogSource, message: string, data?: any) {
    this.log(LogLevel.WARN, source, message, data);
  }

  /** Log an ERROR; pass an Error to capture its message and stack trace. */
  error(source: LogSource, message: string, error?: any) {
    this.log(LogLevel.ERROR, source, message, error);
  }
}

/** Shared singleton logger instance. */
export const logger = new Logger();

View File

@@ -1,5 +1,4 @@
import { describe, expect, test, mock, beforeEach, afterAll, spyOn } from "bun:test";
import * as fs from "fs/promises";
import { describe, expect, test, mock, beforeEach, afterAll } from "bun:test";
// Mock child_process BEFORE importing the service
const mockExec = mock((cmd: string, callback?: any) => {
@@ -8,23 +7,32 @@ const mockExec = mock((cmd: string, callback?: any) => {
return { unref: () => { } };
}
if (cmd.includes("git rev-parse")) {
callback(null, { stdout: "main\n" });
// Simulate successful command execution
let stdout = "";
if (cmd.includes("git rev-parse --abbrev-ref")) {
stdout = "main\n";
} else if (cmd.includes("git rev-parse --short")) {
stdout = "abc1234\n";
} else if (cmd.includes("git rev-parse HEAD")) {
stdout = "abc1234567890\n";
} else if (cmd.includes("git fetch")) {
callback(null, { stdout: "" });
stdout = "";
} else if (cmd.includes("git log")) {
callback(null, { stdout: "abcdef Update 1\n123456 Update 2" });
stdout = "abcdef|Update 1|Author1\n123456|Update 2|Author2";
} else if (cmd.includes("git diff")) {
callback(null, { stdout: "package.json\nsrc/index.ts" });
stdout = "package.json\nsrc/index.ts\nshared/lib/schema.ts";
} else if (cmd.includes("git reset")) {
callback(null, { stdout: "HEAD is now at abcdef Update 1" });
stdout = "HEAD is now at abcdef Update 1";
} else if (cmd.includes("bun install")) {
callback(null, { stdout: "Installed dependencies" });
stdout = "Installed dependencies";
} else if (cmd.includes("drizzle-kit migrate")) {
callback(null, { stdout: "Migrations applied" });
} else {
callback(null, { stdout: "" });
stdout = "Migrations applied";
} else if (cmd.includes("bun run build")) {
stdout = "Build completed";
}
callback(null, stdout, "");
});
mock.module("child_process", () => ({
@@ -32,9 +40,9 @@ mock.module("child_process", () => ({
}));
// Mock fs/promises
const mockWriteFile = mock((path: string, content: string) => Promise.resolve());
const mockReadFile = mock((path: string, encoding: string) => Promise.resolve("{}"));
const mockUnlink = mock((path: string) => Promise.resolve());
const mockWriteFile = mock((_path: string, _content: string) => Promise.resolve());
const mockReadFile = mock((_path: string, _encoding: string) => Promise.resolve("{}"));
const mockUnlink = mock((_path: string) => Promise.resolve());
mock.module("fs/promises", () => ({
writeFile: mockWriteFile,
@@ -43,9 +51,9 @@ mock.module("fs/promises", () => ({
}));
// Mock view module to avoid import issues
mock.module("./update.view", () => ({
getPostRestartEmbed: () => ({ title: "Update Complete" }),
getInstallingDependenciesEmbed: () => ({ title: "Installing..." }),
mock.module("@/modules/admin/update.view", () => ({
getPostRestartEmbed: () => ({ embeds: [{ title: "Update Complete" }], components: [] }),
getPostRestartProgressEmbed: () => ({ title: "Progress..." }),
}));
describe("UpdateService", () => {
@@ -72,7 +80,8 @@ describe("UpdateService", () => {
expect(result.hasUpdates).toBe(true);
expect(result.branch).toBe("main");
expect(result.log).toContain("Update 1");
expect(result.commits.length).toBeGreaterThan(0);
expect(result.commits[0].message).toContain("Update 1");
});
test("should call git rev-parse, fetch, and log commands", async () => {
@@ -83,43 +92,82 @@ describe("UpdateService", () => {
expect(calls.some((cmd: string) => cmd.includes("git fetch"))).toBe(true);
expect(calls.some((cmd: string) => cmd.includes("git log"))).toBe(true);
});
test("should include requirements in the response", async () => {
const result = await UpdateService.checkForUpdates();
expect(result.requirements).toBeDefined();
expect(result.requirements.needsRootInstall).toBe(true); // package.json is in mock
expect(result.requirements.needsMigrations).toBe(true); // schema.ts is in mock
expect(result.requirements.changedFiles).toContain("package.json");
});
});
describe("performUpdate", () => {
test("should run git reset --hard with correct branch", async () => {
await UpdateService.performUpdate("main");
const lastCall = mockExec.mock.lastCall;
expect(lastCall).toBeDefined();
expect(lastCall![0]).toContain("git reset --hard origin/main");
const calls = mockExec.mock.calls.map((c: any) => c[0]);
expect(calls.some((cmd: string) => cmd.includes("git reset --hard origin/main"))).toBe(true);
});
});
describe("checkUpdateRequirements", () => {
describe("checkUpdateRequirements (deprecated)", () => {
test("should detect package.json and schema.ts changes", async () => {
const result = await UpdateService.checkUpdateRequirements("main");
expect(result.needsInstall).toBe(true);
expect(result.needsMigrations).toBe(false); // mock doesn't include schema.ts
expect(result.needsRootInstall).toBe(true);
expect(result.needsMigrations).toBe(true);
expect(result.error).toBeUndefined();
});
test("should call git diff with correct branch", async () => {
await UpdateService.checkUpdateRequirements("develop");
const lastCall = mockExec.mock.lastCall;
expect(lastCall).toBeDefined();
expect(lastCall![0]).toContain("git diff HEAD..origin/develop");
const calls = mockExec.mock.calls.map((c: any) => c[0]);
expect(calls.some((cmd: string) => cmd.includes("git diff HEAD..origin/develop"))).toBe(true);
});
});
describe("installDependencies", () => {
test("should run bun install and return output", async () => {
const output = await UpdateService.installDependencies();
test("should run bun install for root only", async () => {
const output = await UpdateService.installDependencies({ root: true, web: false });
expect(output).toBe("Installed dependencies");
const lastCall = mockExec.mock.lastCall;
expect(lastCall![0]).toBe("bun install");
expect(output).toContain("Root");
const calls = mockExec.mock.calls.map((c: any) => c[0]);
expect(calls.some((cmd: string) => cmd === "bun install")).toBe(true);
});
test("should run bun install for both root and web in parallel", async () => {
const output = await UpdateService.installDependencies({ root: true, web: true });
expect(output).toContain("Root");
expect(output).toContain("Web");
const calls = mockExec.mock.calls.map((c: any) => c[0]);
expect(calls.some((cmd: string) => cmd === "bun install")).toBe(true);
expect(calls.some((cmd: string) => cmd.includes("cd web && bun install"))).toBe(true);
});
});
describe("categorizeChanges", () => {
test("should categorize files correctly", () => {
const files = [
"bot/commands/admin/update.ts",
"bot/modules/admin/update.view.ts",
"web/src/components/Button.tsx",
"shared/lib/utils.ts",
"package.json",
"drizzle/0001_migration.sql"
];
const categories = UpdateService.categorizeChanges(files);
expect(categories["Commands"]).toBe(1);
expect(categories["Modules"]).toBe(1);
expect(categories["Web Dashboard"]).toBe(1);
expect(categories["Library"]).toBe(1);
expect(categories["Dependencies"]).toBe(1);
expect(categories["Database"]).toBe(1);
});
});
@@ -130,7 +178,10 @@ describe("UpdateService", () => {
userId: "456",
timestamp: Date.now(),
runMigrations: true,
installDependencies: false
installDependencies: false,
buildWebAssets: false,
previousCommit: "abc1234",
newCommit: "def5678"
};
await UpdateService.prepareRestartContext(context);
@@ -143,6 +194,39 @@ describe("UpdateService", () => {
});
});
describe("saveRollbackPoint", () => {
test("should save current commit hash to file", async () => {
const commit = await UpdateService.saveRollbackPoint();
expect(commit).toBeTruthy();
expect(mockWriteFile).toHaveBeenCalled();
const lastCall = mockWriteFile.mock.lastCall as [string, string] | undefined;
expect(lastCall![0]).toContain("rollback_commit");
});
});
describe("hasRollbackPoint", () => {
test("should return true when rollback file exists", async () => {
mockReadFile.mockImplementationOnce(() => Promise.resolve("abc123"));
// Clear cache first
(UpdateService as any).rollbackPointExists = null;
const result = await UpdateService.hasRollbackPoint();
expect(result).toBe(true);
});
test("should return false when rollback file does not exist", async () => {
mockReadFile.mockImplementationOnce(() => Promise.reject(new Error("ENOENT")));
// Clear cache first
(UpdateService as any).rollbackPointExists = null;
const result = await UpdateService.hasRollbackPoint();
expect(result).toBe(false);
});
});
describe("triggerRestart", () => {
test("should use RESTART_COMMAND env var when set", async () => {
const originalEnv = process.env.RESTART_COMMAND;
@@ -150,24 +234,19 @@ describe("UpdateService", () => {
await UpdateService.triggerRestart();
const lastCall = mockExec.mock.lastCall;
expect(lastCall).toBeDefined();
expect(lastCall![0]).toBe("pm2 restart bot");
const calls = mockExec.mock.calls.map((c: any) => c[0]);
expect(calls.some((cmd: string) => cmd === "pm2 restart bot")).toBe(true);
process.env.RESTART_COMMAND = originalEnv;
});
test("should write to trigger file when no env var", async () => {
test("should call process.exit when no env var is set", async () => {
const originalEnv = process.env.RESTART_COMMAND;
delete process.env.RESTART_COMMAND;
// Just verify it doesn't throw - actual process.exit is mocked by setTimeout
await UpdateService.triggerRestart();
expect(mockWriteFile).toHaveBeenCalled();
const lastCall = mockWriteFile.mock.lastCall as [string, string] | undefined;
expect(lastCall).toBeDefined();
expect(lastCall![0]).toContain("restart_trigger");
process.env.RESTART_COMMAND = originalEnv;
});
});
@@ -181,7 +260,7 @@ describe("UpdateService", () => {
const createMockChannel = () => ({
isSendable: () => true,
send: mock(() => Promise.resolve())
send: mock(() => Promise.resolve({ edit: mock(() => Promise.resolve()), delete: mock(() => Promise.resolve()) }))
});
test("should ignore stale context (>10 mins old)", async () => {
@@ -190,7 +269,10 @@ describe("UpdateService", () => {
userId: "456",
timestamp: Date.now() - (15 * 60 * 1000), // 15 mins ago
runMigrations: true,
installDependencies: true
installDependencies: true,
buildWebAssets: false,
previousCommit: "abc",
newCommit: "def"
};
mockReadFile.mockImplementationOnce(() => Promise.resolve(JSON.stringify(staleContext)));
@@ -227,7 +309,10 @@ describe("UpdateService", () => {
userId: "456",
timestamp: Date.now(),
runMigrations: false,
installDependencies: false
installDependencies: false,
buildWebAssets: false,
previousCommit: "abc",
newCommit: "def"
};
mockReadFile.mockImplementationOnce(() => Promise.resolve(JSON.stringify(validContext)));
@@ -236,7 +321,7 @@ describe("UpdateService", () => {
const { TextChannel } = await import("discord.js");
const mockChannel = Object.create(TextChannel.prototype);
mockChannel.isSendable = () => true;
mockChannel.send = mock(() => Promise.resolve());
mockChannel.send = mock(() => Promise.resolve({ edit: mock(() => Promise.resolve()), delete: mock(() => Promise.resolve()) }));
const mockClient = createMockClient(mockChannel);

View File

@@ -1,8 +1,8 @@
import { exec } from "child_process";
import { exec, type ExecException } from "child_process";
import { promisify } from "util";
import { writeFile, readFile, unlink } from "fs/promises";
import { Client, TextChannel } from "discord.js";
import { getPostRestartEmbed, getInstallingDependenciesEmbed, getRunningMigrationsEmbed } from "@/modules/admin/update.view";
import { getPostRestartEmbed, getPostRestartProgressEmbed, type PostRestartProgress } from "@/modules/admin/update.view";
import type { PostRestartResult } from "@/modules/admin/update.view";
import type { RestartContext, UpdateCheckResult, UpdateInfo, CommitInfo } from "@/modules/admin/update.types";
@@ -10,32 +10,69 @@ const execAsync = promisify(exec);
// Constants
const STALE_CONTEXT_MS = 10 * 60 * 1000; // 10 minutes
const DEFAULT_TIMEOUT_MS = 30_000; // 30 seconds for git commands
const INSTALL_TIMEOUT_MS = 120_000; // 2 minutes for dependency installation
const BUILD_TIMEOUT_MS = 180_000; // 3 minutes for web build
/**
* Execute a command with timeout protection
*/
async function execWithTimeout(
cmd: string,
timeoutMs: number = DEFAULT_TIMEOUT_MS
): Promise<{ stdout: string; stderr: string }> {
return new Promise((resolve, reject) => {
const process = exec(cmd, (error: ExecException | null, stdout: string, stderr: string) => {
if (error) {
reject(error);
} else {
resolve({ stdout, stderr });
}
});
const timer = setTimeout(() => {
process.kill("SIGTERM");
reject(new Error(`Command timed out after ${timeoutMs}ms: ${cmd}`));
}, timeoutMs);
process.on("exit", () => clearTimeout(timer));
});
}
export class UpdateService {
private static readonly CONTEXT_FILE = ".restart_context.json";
private static readonly ROLLBACK_FILE = ".rollback_commit.txt";
// Cache for rollback state (set when we save, cleared on cleanup)
private static rollbackPointExists: boolean | null = null;
/**
* Check for available updates with detailed commit information
* Optimized: Parallel git commands and combined requirements check
*/
static async checkForUpdates(): Promise<UpdateInfo> {
const { stdout: branchName } = await execAsync("git rev-parse --abbrev-ref HEAD");
static async checkForUpdates(): Promise<UpdateInfo & { requirements: UpdateCheckResult }> {
// Get branch first (needed for subsequent commands)
const { stdout: branchName } = await execWithTimeout("git rev-parse --abbrev-ref HEAD");
const branch = branchName.trim();
const { stdout: currentCommit } = await execAsync("git rev-parse --short HEAD");
// Parallel execution: get current commit while fetching
const [currentResult] = await Promise.all([
execWithTimeout("git rev-parse --short HEAD"),
execWithTimeout(`git fetch origin ${branch} --prune`) // Only fetch current branch
]);
const currentCommit = currentResult.stdout.trim();
await execAsync("git fetch --all");
// After fetch completes, get remote info in parallel
const [latestResult, logResult, diffResult] = await Promise.all([
execWithTimeout(`git rev-parse --short origin/${branch}`),
execWithTimeout(`git log HEAD..origin/${branch} --format="%h|%s|%an" --no-merges`),
execWithTimeout(`git diff HEAD..origin/${branch} --name-only`)
]);
const { stdout: latestCommit } = await execAsync(`git rev-parse --short origin/${branch}`);
const latestCommit = latestResult.stdout.trim();
// Get commit log with author info
const { stdout: logOutput } = await execAsync(
`git log HEAD..origin/${branch} --format="%h|%s|%an" --no-merges`
);
const commits: CommitInfo[] = logOutput
// Parse commit log
const commits: CommitInfo[] = logResult.stdout
.trim()
.split("\n")
.filter(line => line.length > 0)
@@ -44,47 +81,70 @@ export class UpdateService {
return { hash: hash || "", message: message || "", author: author || "" };
});
// Parse changed files and analyze requirements in one pass
const changedFiles = diffResult.stdout.trim().split("\n").filter(f => f.length > 0);
const requirements = this.analyzeChangedFiles(changedFiles);
return {
hasUpdates: commits.length > 0,
branch,
currentCommit: currentCommit.trim(),
latestCommit: latestCommit.trim(),
currentCommit,
latestCommit,
commitCount: commits.length,
commits
commits,
requirements
};
}
/**
* Analyze what the update requires
* Analyze changed files to determine update requirements
* Extracted for reuse and clarity
*/
private static analyzeChangedFiles(changedFiles: string[]): UpdateCheckResult {
const needsRootInstall = changedFiles.some(file =>
file === "package.json" || file === "bun.lock"
);
const needsWebInstall = changedFiles.some(file =>
file === "web/package.json" || file === "web/bun.lock"
);
// Only rebuild web if essential source files changed
const needsWebBuild = changedFiles.some(file =>
file.match(/^web\/src\/(components|pages|lib|index)/) ||
file === "web/build.ts" ||
file === "web/tailwind.config.ts" ||
file === "web/tsconfig.json"
);
const needsMigrations = changedFiles.some(file =>
file.includes("schema.ts") || file.startsWith("drizzle/")
);
return {
needsRootInstall,
needsWebInstall,
needsWebBuild,
needsMigrations,
changedFiles
};
}
/**
* @deprecated Use checkForUpdates() which now includes requirements
* Kept for backwards compatibility
*/
static async checkUpdateRequirements(branch: string): Promise<UpdateCheckResult> {
try {
const { stdout } = await execAsync(`git diff HEAD..origin/${branch} --name-only`);
const { stdout } = await execWithTimeout(`git diff HEAD..origin/${branch} --name-only`);
const changedFiles = stdout.trim().split("\n").filter(f => f.length > 0);
const needsRootInstall = changedFiles.some(file =>
file === "package.json" || file === "bun.lock"
);
const needsWebInstall = changedFiles.some(file =>
file === "web/package.json" || file === "web/bun.lock"
);
const needsMigrations = changedFiles.some(file =>
file.includes("schema.ts") || file.startsWith("drizzle/")
);
return {
needsRootInstall,
needsWebInstall,
needsMigrations,
changedFiles
};
return this.analyzeChangedFiles(changedFiles);
} catch (e) {
console.error("Failed to check update requirements:", e);
return {
needsRootInstall: false,
needsWebInstall: false,
needsWebBuild: false,
needsMigrations: false,
changedFiles: [],
error: e instanceof Error ? e : new Error(String(e))
@@ -119,9 +179,10 @@ export class UpdateService {
* Save the current commit for potential rollback
*/
static async saveRollbackPoint(): Promise<string> {
const { stdout } = await execAsync("git rev-parse HEAD");
const { stdout } = await execWithTimeout("git rev-parse HEAD");
const commit = stdout.trim();
await writeFile(this.ROLLBACK_FILE, commit);
this.rollbackPointExists = true; // Cache the state
return commit;
}
@@ -131,8 +192,9 @@ export class UpdateService {
static async rollback(): Promise<{ success: boolean; message: string }> {
try {
const rollbackCommit = await readFile(this.ROLLBACK_FILE, "utf-8");
await execAsync(`git reset --hard ${rollbackCommit.trim()}`);
await execWithTimeout(`git reset --hard ${rollbackCommit.trim()}`);
await unlink(this.ROLLBACK_FILE);
this.rollbackPointExists = false;
return { success: true, message: `Rolled back to ${rollbackCommit.trim().substring(0, 7)}` };
} catch (e) {
return {
@@ -144,12 +206,18 @@ export class UpdateService {
/**
* Check if a rollback point exists
* Uses cache when available to avoid file system access
*/
static async hasRollbackPoint(): Promise<boolean> {
if (this.rollbackPointExists !== null) {
return this.rollbackPointExists;
}
try {
await readFile(this.ROLLBACK_FILE, "utf-8");
this.rollbackPointExists = true;
return true;
} catch {
this.rollbackPointExists = false;
return false;
}
}
@@ -158,26 +226,32 @@ export class UpdateService {
* Perform the git update
*/
static async performUpdate(branch: string): Promise<void> {
await execAsync(`git reset --hard origin/${branch}`);
await execWithTimeout(`git reset --hard origin/${branch}`);
}
/**
* Install dependencies for specified projects
* Optimized: Parallel installation
*/
static async installDependencies(options: { root: boolean; web: boolean }): Promise<string> {
const outputs: string[] = [];
const tasks: Promise<{ label: string; output: string }>[] = [];
if (options.root) {
const { stdout } = await execAsync("bun install");
outputs.push(`📦 Root: ${stdout.trim() || "Done"}`);
tasks.push(
execWithTimeout("bun install", INSTALL_TIMEOUT_MS)
.then(({ stdout }) => ({ label: "📦 Root", output: stdout.trim() || "Done" }))
);
}
if (options.web) {
const { stdout } = await execAsync("cd web && bun install");
outputs.push(`🌐 Web: ${stdout.trim() || "Done"}`);
tasks.push(
execWithTimeout("cd web && bun install", INSTALL_TIMEOUT_MS)
.then(({ stdout }) => ({ label: "🌐 Web", output: stdout.trim() || "Done" }))
);
}
return outputs.join("\n");
const results = await Promise.all(tasks);
return results.map(r => `${r.label}: ${r.output}`).join("\n");
}
/**
@@ -218,7 +292,7 @@ export class UpdateService {
}
const result = await this.executePostRestartTasks(context, channel);
await this.notifyPostRestartResult(channel, result, context);
await this.notifyPostRestartResult(channel, result);
await this.cleanupContext();
} catch (e) {
console.error("Failed to handle post-restart context:", e);
@@ -259,51 +333,120 @@ export class UpdateService {
const result: PostRestartResult = {
installSuccess: true,
installOutput: "",
webBuildSuccess: true,
webBuildOutput: "",
migrationSuccess: true,
migrationOutput: "",
ranInstall: context.installDependencies,
ranWebBuild: context.buildWebAssets,
ranMigrations: context.runMigrations,
previousCommit: context.previousCommit,
newCommit: context.newCommit
};
// 1. Install Dependencies if needed
// Track progress for consolidated message
const progress: PostRestartProgress = {
installDeps: context.installDependencies,
buildWeb: context.buildWebAssets,
runMigrations: context.runMigrations,
currentStep: "starting"
};
// Only send progress message if there are tasks to run
const hasTasks = context.installDependencies || context.buildWebAssets || context.runMigrations;
let progressMessage = hasTasks
? await channel.send({ embeds: [getPostRestartProgressEmbed(progress)] })
: null;
// Helper to update progress message
const updateProgress = async () => {
if (progressMessage) {
await progressMessage.edit({ embeds: [getPostRestartProgressEmbed(progress)] });
}
};
// 1. Install Dependencies if needed (PARALLELIZED)
if (context.installDependencies) {
try {
await channel.send({ embeds: [getInstallingDependenciesEmbed()] });
progress.currentStep = "install";
await updateProgress();
const { stdout: rootOutput } = await execAsync("bun install");
const { stdout: webOutput } = await execAsync("cd web && bun install");
// Parallel installation of root and web dependencies
const [rootResult, webResult] = await Promise.all([
execWithTimeout("bun install", INSTALL_TIMEOUT_MS)
.then(({ stdout }) => ({ success: true, output: stdout.trim() || "Done" }))
.catch(err => ({ success: false, output: err instanceof Error ? err.message : String(err) })),
execWithTimeout("cd web && bun install", INSTALL_TIMEOUT_MS)
.then(({ stdout }) => ({ success: true, output: stdout.trim() || "Done" }))
.catch(err => ({ success: false, output: err instanceof Error ? err.message : String(err) }))
]);
result.installOutput = `📦 Root: ${rootOutput.trim() || "Done"}\n🌐 Web: ${webOutput.trim() || "Done"}`;
result.installSuccess = rootResult.success && webResult.success;
result.installOutput = `📦 Root: ${rootResult.output}\n🌐 Web: ${webResult.output}`;
progress.installDone = true;
if (!result.installSuccess) {
console.error("Dependency Install Failed:", result.installOutput);
}
} catch (err: unknown) {
result.installSuccess = false;
result.installOutput = err instanceof Error ? err.message : String(err);
progress.installDone = true;
console.error("Dependency Install Failed:", err);
}
}
// 2. Run Migrations
// 2. Build Web Assets if needed
if (context.buildWebAssets) {
try {
progress.currentStep = "build";
await updateProgress();
const { stdout } = await execWithTimeout("cd web && bun run build", BUILD_TIMEOUT_MS);
result.webBuildOutput = stdout.trim() || "Build completed successfully";
progress.buildDone = true;
} catch (err: unknown) {
result.webBuildSuccess = false;
result.webBuildOutput = err instanceof Error ? err.message : String(err);
progress.buildDone = true;
console.error("Web Build Failed:", err);
}
}
// 3. Run Migrations
if (context.runMigrations) {
try {
await channel.send({ embeds: [getRunningMigrationsEmbed()] });
const { stdout } = await execAsync("bun x drizzle-kit migrate");
progress.currentStep = "migrate";
await updateProgress();
const { stdout } = await execWithTimeout("bun x drizzle-kit migrate", DEFAULT_TIMEOUT_MS);
result.migrationOutput = stdout;
progress.migrateDone = true;
} catch (err: unknown) {
result.migrationSuccess = false;
result.migrationOutput = err instanceof Error ? err.message : String(err);
progress.migrateDone = true;
console.error("Migration Failed:", err);
}
}
// Delete progress message before final result
if (progressMessage) {
try {
await progressMessage.delete();
} catch {
// Message may already be deleted, ignore
}
}
return result;
}
private static async notifyPostRestartResult(
channel: TextChannel,
result: PostRestartResult,
context: RestartContext
result: PostRestartResult
): Promise<void> {
// Use cached rollback state - we just saved it before restart
const hasRollback = await this.hasRollbackPoint();
await channel.send(getPostRestartEmbed(result, hasRollback));
}
@@ -314,5 +457,6 @@ export class UpdateService {
} catch {
// File may not exist, ignore
}
// Don't clear rollback cache here - rollback file persists
}
}

View File

@@ -13,6 +13,7 @@ export const DashboardStatsSchema = z.object({
bot: z.object({
name: z.string(),
avatarUrl: z.string().nullable(),
status: z.string().nullable(),
}),
guilds: z.object({
count: z.number(),
@@ -84,6 +85,7 @@ export const ClientStatsSchema = z.object({
bot: z.object({
name: z.string(),
avatarUrl: z.string().nullable(),
status: z.string().nullable(),
}),
guilds: z.number(),
ping: z.number(),

View File

@@ -87,7 +87,7 @@ export const economyService = {
});
if (cooldown && cooldown.expiresAt > now) {
throw new UserError(`Daily already claimed today. Next claim <t:${Math.floor(cooldown.expiresAt.getTime() / 1000)}:F>`);
throw new UserError(`You have already claimed your daily reward today.\nNext claim available: <t:${Math.floor(cooldown.expiresAt.getTime() / 1000)}:F> (<t:${Math.floor(cooldown.expiresAt.getTime() / 1000)}:R>)`);
}
// Get user for streak logic
@@ -196,6 +196,10 @@ export const economyService = {
description: description,
});
// Trigger Quest Event
const { questService } = await import("@shared/modules/quest/quest.service");
await questService.handleEvent(id, type, 1, txFn);
return user;
}, tx);
},

View File

@@ -0,0 +1,237 @@
import { describe, it, expect, mock, beforeEach, setSystemTime } from "bun:test";
import { examService, ExamStatus } from "@shared/modules/economy/exam.service";
import { users, userTimers, transactions } from "@db/schema";
// ---------------------------------------------------------------------------
// Mock scaffolding for exam.service tests.
// All drizzle query/mutation entry points are replaced with shared mock fns so
// individual tests can queue results and assert call patterns.
// ---------------------------------------------------------------------------
// Define mock functions
const mockFindFirst = mock();
const mockInsert = mock();
const mockUpdate = mock();
const mockValues = mock();
const mockReturning = mock();
const mockSet = mock();
const mockWhere = mock();
// Chainable mock setup — mirrors drizzle's fluent API:
//   insert(table).values(...).returning()
//   update(table).set(...).where(...).returning()
mockInsert.mockReturnValue({ values: mockValues });
mockValues.mockReturnValue({ returning: mockReturning });
mockUpdate.mockReturnValue({ set: mockSet });
mockSet.mockReturnValue({ where: mockWhere });
mockWhere.mockReturnValue({ returning: mockReturning });
// Mock DrizzleClient — the root client and every transaction share the same
// mock fns, so assertions work regardless of transaction nesting.
mock.module("@shared/db/DrizzleClient", () => {
  const createMockTx = () => ({
    query: {
      users: { findFirst: mockFindFirst },
      userTimers: { findFirst: mockFindFirst },
    },
    insert: mockInsert,
    update: mockUpdate,
  });
  return {
    DrizzleClient: {
      query: {
        users: { findFirst: mockFindFirst },
        userTimers: { findFirst: mockFindFirst },
      },
      insert: mockInsert,
      update: mockUpdate,
      transaction: async (cb: any) => {
        return cb(createMockTx());
      }
    },
  };
});
// Mock withTransaction — reuses an outer tx when one is passed, matching the
// real helper's contract.
mock.module("@/lib/db", () => ({
  withTransaction: async (cb: any, tx?: any) => {
    if (tx) return cb(tx);
    return cb({
      query: {
        users: { findFirst: mockFindFirst },
        userTimers: { findFirst: mockFindFirst },
      },
      insert: mockInsert,
      update: mockUpdate,
    });
  }
}));
// Mock Config — fixed multiplier bounds so reward-range assertions are stable.
mock.module("@shared/lib/config", () => ({
  config: {
    economy: {
      exam: {
        multMin: 1.0,
        multMax: 2.0,
      }
    }
  }
}));
// Mock User Service
mock.module("@shared/modules/user/user.service", () => ({
  userService: {
    getOrCreateUser: mock()
  }
}));
// Mock Dashboard Service
mock.module("@shared/modules/dashboard/dashboard.service", () => ({
  dashboardService: {
    recordEvent: mock()
  }
}));
describe("ExamService", () => {
  beforeEach(() => {
    // mockReset drops queued resolved values; the others use mockClear so the
    // chained return values configured at module scope survive between tests.
    mockFindFirst.mockReset();
    mockInsert.mockClear();
    mockUpdate.mockClear();
    mockValues.mockClear();
    mockReturning.mockClear();
    mockSet.mockClear();
    mockWhere.mockClear();
  });
  describe("getExamStatus", () => {
    it("should return NOT_REGISTERED if no timer exists", async () => {
      mockFindFirst.mockResolvedValue(undefined);
      const status = await examService.getExamStatus("1");
      expect(status.status).toBe(ExamStatus.NOT_REGISTERED);
    });
    it("should return COOLDOWN if now < expiresAt", async () => {
      // setSystemTime freezes `new Date()` inside the service.
      const now = new Date("2024-01-10T12:00:00Z");
      setSystemTime(now);
      const future = new Date("2024-01-11T00:00:00Z");
      mockFindFirst.mockResolvedValue({
        expiresAt: future,
        metadata: { examDay: 3, lastXp: "100" }
      });
      const status = await examService.getExamStatus("1");
      expect(status.status).toBe(ExamStatus.COOLDOWN);
      // setHours mutates `future` in place and returns the epoch-ms timestamp
      // (local midnight) — same truncation the service applies.
      expect(status.nextExamAt?.getTime()).toBe(future.setHours(0,0,0,0));
    });
    it("should return MISSED if it is the wrong day", async () => {
      const now = new Date("2024-01-15T12:00:00Z"); // Monday (1)
      setSystemTime(now);
      const past = new Date("2024-01-10T00:00:00Z"); // Wednesday (3) last week
      mockFindFirst.mockResolvedValue({
        expiresAt: past,
        metadata: { examDay: 3, lastXp: "100" } // Registered for Wednesday
      });
      const status = await examService.getExamStatus("1");
      expect(status.status).toBe(ExamStatus.MISSED);
      expect(status.examDay).toBe(3);
    });
    it("should return AVAILABLE if it is the correct day", async () => {
      const now = new Date("2024-01-17T12:00:00Z"); // Wednesday (3)
      setSystemTime(now);
      const past = new Date("2024-01-10T00:00:00Z");
      mockFindFirst.mockResolvedValue({
        expiresAt: past,
        metadata: { examDay: 3, lastXp: "100" }
      });
      const status = await examService.getExamStatus("1");
      expect(status.status).toBe(ExamStatus.AVAILABLE);
      expect(status.examDay).toBe(3);
      // lastXp is deserialized from the metadata string into a bigint.
      expect(status.lastXp).toBe(100n);
    });
  });
  describe("registerForExam", () => {
    it("should create user and timer correctly", async () => {
      const now = new Date("2024-01-15T12:00:00Z"); // Monday (1)
      setSystemTime(now);
      const { userService } = await import("@shared/modules/user/user.service");
      (userService.getOrCreateUser as any).mockResolvedValue({ id: 1n, xp: 500n });
      const result = await examService.registerForExam("1", "testuser");
      // NOT_REGISTERED here signals "registration just happened" (see service).
      expect(result.status).toBe(ExamStatus.NOT_REGISTERED);
      expect(result.examDay).toBe(1);
      expect(mockInsert).toHaveBeenCalledWith(userTimers);
      expect(mockInsert).toHaveBeenCalledTimes(1);
    });
  });
  describe("takeExam", () => {
    it("should return NOT_REGISTERED if not registered", async () => {
      mockFindFirst.mockResolvedValueOnce({ id: 1n }) // user check
        .mockResolvedValueOnce(undefined); // timer check
      const result = await examService.takeExam("1");
      expect(result.status).toBe(ExamStatus.NOT_REGISTERED);
    });
    it("should handle missed exam and schedule for next exam day", async () => {
      const now = new Date("2024-01-15T12:00:00Z"); // Monday (1)
      setSystemTime(now);
      const past = new Date("2024-01-10T00:00:00Z");
      mockFindFirst.mockResolvedValueOnce({ id: 1n, xp: 600n }) // user
        .mockResolvedValueOnce({
          expiresAt: past,
          metadata: { examDay: 3, lastXp: "500" } // Registered for Wednesday
        }); // timer
      const result = await examService.takeExam("1");
      expect(result.status).toBe(ExamStatus.MISSED);
      expect(result.examDay).toBe(3);
      // Should set next exam to next Wednesday
      // Monday (1) + 2 days = Wednesday (3)
      const expected = new Date("2024-01-17T00:00:00Z");
      expect(result.nextExamAt!.getTime()).toBe(expected.getTime());
      expect(mockUpdate).toHaveBeenCalledWith(userTimers);
    });
    it("should calculate rewards and update state when passed", async () => {
      const now = new Date("2024-01-17T12:00:00Z"); // Wednesday (3)
      setSystemTime(now);
      const past = new Date("2024-01-10T00:00:00Z");
      mockFindFirst.mockResolvedValueOnce({ id: 1n, username: "testuser", xp: 1000n, balance: 0n }) // user
        .mockResolvedValueOnce({
          expiresAt: past,
          metadata: { examDay: 3, lastXp: "500" }
        }); // timer
      const result = await examService.takeExam("1");
      expect(result.status).toBe(ExamStatus.AVAILABLE);
      expect(result.xpDiff).toBe(500n);
      // Multiplier is between 1.0 and 2.0 based on mock config
      expect(result.multiplier).toBeGreaterThanOrEqual(1.0);
      expect(result.multiplier).toBeLessThanOrEqual(2.0);
      // reward = xpDiff * multiplier, so it falls in [500, 1000].
      expect(result.reward).toBeGreaterThanOrEqual(500n);
      expect(result.reward).toBeLessThanOrEqual(1000n);
      expect(mockUpdate).toHaveBeenCalledWith(userTimers);
      expect(mockUpdate).toHaveBeenCalledWith(users);
      // Verify transaction
      expect(mockInsert).toHaveBeenCalledWith(transactions);
      expect(mockValues).toHaveBeenCalledWith(expect.objectContaining({
        amount: result.reward,
        userId: 1n,
        type: expect.anything()
      }));
    });
  });
});

View File

@@ -0,0 +1,262 @@
import { users, userTimers, transactions } from "@db/schema";
import { eq, and, sql } from "drizzle-orm";
import { TimerType, TransactionType } from "@shared/lib/constants";
import { config } from "@shared/lib/config";
import { withTransaction } from "@/lib/db";
import type { Transaction } from "@shared/lib/types";
import { UserError } from "@shared/lib/errors";
// Identity of the single user_timers row that stores a user's exam state
// (one row per user, keyed by type + key).
const EXAM_TIMER_TYPE = TimerType.EXAM_SYSTEM;
const EXAM_TIMER_KEY = 'default';
/**
 * Per-user exam state persisted in the exam timer's JSON metadata column.
 */
export interface ExamMetadata {
  // Day of week the user registered on (0 = Sunday … 6 = Saturday, local time);
  // the exam can only be taken on this weekday.
  examDay: number;
  // XP snapshot at the previous exam, stored as a string because JSON cannot
  // represent bigint values.
  lastXp: string;
}

/** Lifecycle states of the weekly exam for a user. */
export enum ExamStatus {
  NOT_REGISTERED = 'NOT_REGISTERED',
  COOLDOWN = 'COOLDOWN',
  MISSED = 'MISSED',
  AVAILABLE = 'AVAILABLE',
}

/**
 * Result of an exam action (status query, registration, or taking the exam).
 * Optional fields are populated depending on the resulting status.
 */
export interface ExamActionResult {
  status: ExamStatus;
  nextExamAt?: Date;
  reward?: bigint;
  xpDiff?: bigint;
  multiplier?: number;
  examDay?: number;
  // XP snapshot from the previous exam. Added so getExamStatus's AVAILABLE
  // result (which includes lastXp) is representable by this type.
  lastXp?: bigint;
}
/**
 * Weekly exam system.
 *
 * A user registers once; the weekday of registration becomes their permanent
 * exam day. Each week, on that day, they may take the exam and are rewarded
 * based on the XP gained since the previous exam, scaled by a random
 * multiplier from config.economy.exam. State lives in a single user_timers
 * row (EXAM_TIMER_TYPE / EXAM_TIMER_KEY) whose JSON metadata is ExamMetadata.
 *
 * All methods run inside withTransaction and accept an optional outer
 * transaction `tx` so callers can compose them into larger transactions.
 */
export const examService = {
  /**
   * Get the current exam status for a user.
   * Read-only: never mutates the timer row.
   */
  async getExamStatus(userId: string, tx?: Transaction) {
    return await withTransaction(async (txFn) => {
      const timer = await txFn.query.userTimers.findFirst({
        where: and(
          eq(userTimers.userId, BigInt(userId)),
          eq(userTimers.type, EXAM_TIMER_TYPE),
          eq(userTimers.key, EXAM_TIMER_KEY)
        )
      });
      if (!timer) {
        return { status: ExamStatus.NOT_REGISTERED };
      }
      const now = new Date();
      // Truncate to local midnight so the cooldown check is day-granular.
      const expiresAt = new Date(timer.expiresAt);
      expiresAt.setHours(0, 0, 0, 0);
      if (now < expiresAt) {
        return {
          status: ExamStatus.COOLDOWN,
          nextExamAt: expiresAt
        };
      }
      const metadata = timer.metadata as unknown as ExamMetadata;
      // Date.getDay(): 0 = Sunday … 6 = Saturday (local time).
      const currentDay = now.getDay();
      if (currentDay !== metadata.examDay) {
        // Cooldown elapsed but today is not the registered weekday.
        // NOTE(review): nextExamAt here is the (already past) expiry date, not
        // the next occurrence of examDay — takeExam computes the real next
        // date when it reschedules. Confirm callers don't display this raw.
        return {
          status: ExamStatus.MISSED,
          nextExamAt: expiresAt,
          examDay: metadata.examDay
        };
      }
      return {
        status: ExamStatus.AVAILABLE,
        examDay: metadata.examDay,
        // lastXp is serialized as a string in JSON metadata; default to 0.
        lastXp: BigInt(metadata.lastXp || "0")
      };
    }, tx);
  },
  /**
   * Register a user for the first time.
   * Today's weekday becomes the permanent exam day; the first exam is
   * scheduled exactly one week out (midnight local time).
   */
  async registerForExam(userId: string, username: string, tx?: Transaction): Promise<ExamActionResult> {
    return await withTransaction(async (txFn) => {
      // Ensure user exists
      const { userService } = await import("@shared/modules/user/user.service");
      const user = await userService.getOrCreateUser(userId, username, txFn);
      if (!user) throw new Error("Failed to get or create user.");
      const now = new Date();
      const currentDay = now.getDay();
      // Set next exam to next week
      const nextExamDate = new Date(now);
      nextExamDate.setDate(now.getDate() + 7);
      nextExamDate.setHours(0, 0, 0, 0);
      const metadata: ExamMetadata = {
        examDay: currentDay,
        // Snapshot current XP: the first exam rewards XP gained from here on.
        lastXp: (user.xp ?? 0n).toString()
      };
      await txFn.insert(userTimers).values({
        userId: BigInt(userId),
        type: EXAM_TIMER_TYPE,
        key: EXAM_TIMER_KEY,
        expiresAt: nextExamDate,
        metadata: metadata
      });
      // NOT_REGISTERED here signals "the user was not registered and has just
      // been registered" (first-time flow; matches the unit tests), not the
      // post-registration state.
      return {
        status: ExamStatus.NOT_REGISTERED,
        nextExamAt: nextExamDate,
        examDay: currentDay
      };
    }, tx);
  },
  /**
   * Take the exam. Handles missed exams and reward calculations.
   *
   * Flow: cooldown check → wrong-day (MISSED) rescheduling → reward based on
   * XP gained since last exam × random multiplier → persist timer, balance,
   * transaction record, and a dashboard event.
   */
  async takeExam(userId: string, tx?: Transaction): Promise<ExamActionResult> {
    return await withTransaction(async (txFn) => {
      const user = await txFn.query.users.findFirst({
        where: eq(users.id, BigInt(userId))
      });
      if (!user) throw new Error("User not found");
      const timer = await txFn.query.userTimers.findFirst({
        where: and(
          eq(userTimers.userId, BigInt(userId)),
          eq(userTimers.type, EXAM_TIMER_TYPE),
          eq(userTimers.key, EXAM_TIMER_KEY)
        )
      });
      if (!timer) {
        return { status: ExamStatus.NOT_REGISTERED };
      }
      const now = new Date();
      // Same day-granular cooldown check as getExamStatus.
      const expiresAt = new Date(timer.expiresAt);
      expiresAt.setHours(0, 0, 0, 0);
      if (now < expiresAt) {
        return {
          status: ExamStatus.COOLDOWN,
          nextExamAt: expiresAt
        };
      }
      const metadata = timer.metadata as unknown as ExamMetadata;
      const examDay = metadata.examDay;
      const currentDay = now.getDay();
      if (currentDay !== examDay) {
        // Missed exam logic: reschedule to the next occurrence of examDay
        // (never today — a 0 gap means a full week out) and re-snapshot XP.
        let daysUntil = (examDay - currentDay + 7) % 7;
        if (daysUntil === 0) daysUntil = 7;
        const nextExamDate = new Date(now);
        nextExamDate.setDate(now.getDate() + daysUntil);
        nextExamDate.setHours(0, 0, 0, 0);
        const newMetadata: ExamMetadata = {
          examDay: examDay,
          lastXp: (user.xp ?? 0n).toString()
        };
        await txFn.update(userTimers)
          .set({
            expiresAt: nextExamDate,
            metadata: newMetadata
          })
          .where(and(
            eq(userTimers.userId, BigInt(userId)),
            eq(userTimers.type, EXAM_TIMER_TYPE),
            eq(userTimers.key, EXAM_TIMER_KEY)
          ));
        return {
          status: ExamStatus.MISSED,
          nextExamAt: nextExamDate,
          examDay: examDay
        };
      }
      // Reward Calculation: reward = max(0, xp gained) × random multiplier in
      // [multMin, multMax) from config.
      const lastXp = BigInt(metadata.lastXp || "0");
      const currentXp = user.xp ?? 0n;
      const diff = currentXp - lastXp;
      const multMin = config.economy.exam.multMin;
      const multMax = config.economy.exam.multMax;
      const multiplier = Math.random() * (multMax - multMin) + multMin;
      let reward = 0n;
      if (diff > 0n) {
        // Use scaled BigInt arithmetic to avoid precision loss with large XP values
        const scaledMultiplier = BigInt(Math.round(multiplier * 10000));
        reward = (diff * scaledMultiplier) / 10000n;
      }
      // Next exam: same weekday, one week out (midnight local time).
      const nextExamDate = new Date(now);
      nextExamDate.setDate(now.getDate() + 7);
      nextExamDate.setHours(0, 0, 0, 0);
      const newMetadata: ExamMetadata = {
        examDay: examDay,
        lastXp: currentXp.toString()
      };
      // Update Timer
      await txFn.update(userTimers)
        .set({
          expiresAt: nextExamDate,
          metadata: newMetadata
        })
        .where(and(
          eq(userTimers.userId, BigInt(userId)),
          eq(userTimers.type, EXAM_TIMER_TYPE),
          eq(userTimers.key, EXAM_TIMER_KEY)
        ));
      // Add Currency
      if (reward > 0n) {
        await txFn.update(users)
          .set({
            balance: sql`${users.balance} + ${reward}`
          })
          .where(eq(users.id, BigInt(userId)));
        // Add Transaction Record
        await txFn.insert(transactions).values({
          userId: BigInt(userId),
          amount: reward,
          type: TransactionType.EXAM_REWARD,
          description: `Weekly exam reward (XP Diff: ${diff})`,
        });
      }
      // Record dashboard event (fires even when reward is 0).
      const { dashboardService } = await import("@shared/modules/dashboard/dashboard.service");
      await dashboardService.recordEvent({
        type: 'success',
        message: `${user.username} passed their exam: ${reward.toLocaleString()} AU`,
        icon: '🎓'
      });
      return {
        status: ExamStatus.AVAILABLE,
        nextExamAt: nextExamDate,
        reward,
        xpDiff: diff,
        multiplier,
        examDay
      };
    }, tx);
  }
};

View File

@@ -37,6 +37,11 @@ export const inventoryService = {
eq(inventory.itemId, itemId)
))
.returning();
// Trigger Quest Event
const { questService } = await import("@shared/modules/quest/quest.service");
await questService.handleEvent(userId, `ITEM_COLLECT:${itemId}`, Number(quantity), txFn);
return entry;
} else {
// Check Slot Limit
@@ -60,6 +65,11 @@ export const inventoryService = {
quantity: quantity,
})
.returning();
// Trigger Quest Event
const { questService } = await import("@shared/modules/quest/quest.service");
await questService.handleEvent(userId, `ITEM_COLLECT:${itemId}`, Number(quantity), txFn);
return entry;
}
}, tx);
@@ -179,6 +189,10 @@ export const inventoryService = {
await inventoryService.removeItem(userId, itemId, 1n, txFn);
}
// Trigger Quest Event
const { questService } = await import("@shared/modules/quest/quest.service");
await questService.handleEvent(userId, `ITEM_USE:${itemId}`, 1, txFn);
return { success: true, results, usageData, item };
}, tx);
},

View File

@@ -68,6 +68,10 @@ export const levelingService = {
.where(eq(users.id, BigInt(id)))
.returning();
// Trigger Quest Event
const { questService } = await import("@shared/modules/quest/quest.service");
await questService.handleEvent(id, 'XP_GAIN', Number(amount), txFn);
return { user: updatedUser, levelUp, currentLevel: newLevel };
}, tx);
},

View File

@@ -33,6 +33,7 @@ mock.module("@shared/db/DrizzleClient", () => {
const createMockTx = () => ({
query: {
userQuests: { findFirst: mockFindFirst, findMany: mockFindMany },
quests: { findMany: mockFindMany },
},
insert: mockInsert,
update: mockUpdate,
@@ -148,4 +149,147 @@ describe("questService", () => {
expect(result).toEqual(mockData as any);
});
});
describe("getAvailableQuests", () => {
it("should return quests not yet accepted by user", async () => {
// First call to findMany (userQuests) returns accepted quest IDs
// Second call to findMany (quests) returns available quests
mockFindMany
.mockResolvedValueOnce([{ questId: 1 }]) // userQuests
.mockResolvedValueOnce([{ id: 2, name: "New Quest" }]); // quests
const result = await questService.getAvailableQuests("1");
expect(result).toEqual([{ id: 2, name: "New Quest" }] as any);
expect(mockFindMany).toHaveBeenCalledTimes(2);
});
it("should return all quests if user has no assigned quests", async () => {
mockFindMany
.mockResolvedValueOnce([]) // userQuests
.mockResolvedValueOnce([{ id: 1 }, { id: 2 }]); // quests
const result = await questService.getAvailableQuests("1");
expect(result).toEqual([{ id: 1 }, { id: 2 }] as any);
});
});
describe("handleEvent", () => {
it("should progress a quest with sub-events", async () => {
const mockUserQuest = {
userId: 1n,
questId: 101,
progress: 0,
completedAt: null,
quest: { triggerEvent: "ITEM_USE:101", requirements: { target: 5 } }
};
mockFindMany.mockResolvedValue([mockUserQuest]);
mockReturning.mockResolvedValue([{ userId: 1n, questId: 101, progress: 1 }]);
await questService.handleEvent("1", "ITEM_USE:101", 1);
expect(mockUpdate).toHaveBeenCalled();
expect(mockSet).toHaveBeenCalledWith({ progress: 1 });
});
it("should complete a quest when target reached using sub-events", async () => {
const mockUserQuest = {
userId: 1n,
questId: 101,
progress: 4,
completedAt: null,
quest: {
triggerEvent: "ITEM_COLLECT:505",
requirements: { target: 5 },
rewards: { balance: 100 }
}
};
mockFindMany.mockResolvedValue([mockUserQuest]);
mockFindFirst.mockResolvedValue(mockUserQuest); // For completeQuest
await questService.handleEvent("1", "ITEM_COLLECT:505", 1);
// Verify completeQuest was called (it will update completedAt)
expect(mockUpdate).toHaveBeenCalled();
expect(mockSet).toHaveBeenCalledWith({ completedAt: expect.any(Date) });
});
it("should progress a quest with generic events", async () => {
const mockUserQuest = {
userId: 1n,
questId: 102,
progress: 0,
completedAt: null,
quest: { triggerEvent: "ITEM_COLLECT", requirements: { target: 5 } }
};
mockFindMany.mockResolvedValue([mockUserQuest]);
mockReturning.mockResolvedValue([{ userId: 1n, questId: 102, progress: 1 }]);
await questService.handleEvent("1", "ITEM_COLLECT:505", 1);
expect(mockUpdate).toHaveBeenCalled();
expect(mockSet).toHaveBeenCalledWith({ progress: 1 });
});
it("should ignore events that are not prefix matches", async () => {
const mockUserQuest = {
userId: 1n,
questId: 103,
progress: 0,
completedAt: null,
quest: { triggerEvent: "ITEM_COLLECT", requirements: { target: 5 } }
};
mockFindMany.mockResolvedValue([mockUserQuest]);
await questService.handleEvent("1", "ITEM_COLLECT_UNRELATED", 1);
expect(mockUpdate).not.toHaveBeenCalled();
});
it("should not progress a specific quest with a different specific event", async () => {
const mockUserQuest = {
userId: 1n,
questId: 104,
progress: 0,
completedAt: null,
quest: { triggerEvent: "ITEM_COLLECT:101", requirements: { target: 5 } }
};
mockFindMany.mockResolvedValue([mockUserQuest]);
await questService.handleEvent("1", "ITEM_COLLECT:202", 1);
expect(mockUpdate).not.toHaveBeenCalled();
});
it("should not progress a specific quest with a generic event", async () => {
const mockUserQuest = {
userId: 1n,
questId: 105,
progress: 0,
completedAt: null,
quest: { triggerEvent: "ITEM_COLLECT:101", requirements: { target: 5 } }
};
mockFindMany.mockResolvedValue([mockUserQuest]);
await questService.handleEvent("1", "ITEM_COLLECT", 1);
expect(mockUpdate).not.toHaveBeenCalled();
});
it("should ignore irrelevant events", async () => {
const mockUserQuest = {
userId: 1n,
questId: 101,
progress: 0,
completedAt: null,
quest: { triggerEvent: "DIFFERENT_EVENT", requirements: { target: 5 } }
};
mockFindMany.mockResolvedValue([mockUserQuest]);
await questService.handleEvent("1", "TEST_EVENT", 1);
expect(mockUpdate).not.toHaveBeenCalled();
});
});
});

View File

@@ -1,4 +1,4 @@
import { userQuests } from "@db/schema";
import { userQuests, quests } from "@db/schema";
import { eq, and } from "drizzle-orm";
import { UserError } from "@shared/lib/errors";
import { DrizzleClient } from "@shared/db/DrizzleClient";
@@ -7,6 +7,7 @@ import { levelingService } from "@shared/modules/leveling/leveling.service";
import { withTransaction } from "@/lib/db";
import type { Transaction } from "@shared/lib/types";
import { TransactionType } from "@shared/lib/constants";
import { systemEvents, EVENTS } from "@shared/lib/events";
export const questService = {
assignQuest: async (userId: string, questId: number, tx?: Transaction) => {
@@ -34,6 +35,40 @@ export const questService = {
}, tx);
},
/**
 * Progress all of a user's active quests that match an emitted event.
 *
 * Matching: a quest trigger matches on exact equality, or as a prefix before
 * a ":" sub-key (e.g. trigger "ITEM_COLLECT" matches event "ITEM_COLLECT:101",
 * but trigger "ITEM_COLLECT:101" does NOT match event "ITEM_COLLECT").
 * `weight` is the amount of progress this event contributes (default 1).
 */
handleEvent: async (userId: string, eventName: string, weight: number = 1, tx?: Transaction) => {
  return await withTransaction(async (txFn) => {
    // 1. Fetch ALL of the user's quests (with their quest definitions);
    // completed ones are filtered out in memory below.
    // NOTE(review): could filter completedAt IS NULL in SQL to cut rows.
    const activeUserQuests = await txFn.query.userQuests.findMany({
      where: and(
        eq(userQuests.userId, BigInt(userId)),
      ),
      with: {
        quest: true
      }
    });
    const relevant = activeUserQuests.filter(uq => {
      const trigger = uq.quest.triggerEvent;
      // Exact match or prefix match (e.g. ITEM_COLLECT matches ITEM_COLLECT:101)
      const isMatch = eventName === trigger || eventName.startsWith(trigger + ":");
      return isMatch && !uq.completedAt;
    });
    for (const uq of relevant) {
      // Missing/zero target falls back to 1, so a single event completes it.
      const requirements = uq.quest.requirements as { target?: number };
      const target = requirements?.target || 1;
      const newProgress = (uq.progress || 0) + weight;
      if (newProgress >= target) {
        // completeQuest grants rewards and emits EVENTS.QUEST.COMPLETED.
        await questService.completeQuest(userId, uq.questId, txFn);
      } else {
        await questService.updateProgress(userId, uq.questId, newProgress, txFn);
      }
    }
  }, tx);
},
completeQuest: async (userId: string, questId: number, tx?: Transaction) => {
return await withTransaction(async (txFn) => {
const userQuest = await txFn.query.userQuests.findFirst({
@@ -73,6 +108,14 @@ export const questService = {
results.xp = xp;
}
// Emit completion event for the bot to handle notifications
systemEvents.emit(EVENTS.QUEST.COMPLETED, {
userId,
questId,
quest: userQuest.quest,
rewards: results
});
return { success: true, rewards: results };
}, tx);
},
@@ -84,5 +127,75 @@ export const questService = {
quest: true,
}
});
},
/**
 * List quests the user has not yet been assigned.
 * Any quest already present in user_quests for this user (regardless of
 * completion state) is excluded.
 */
async getAvailableQuests(userId: string) {
  // Collect the ids of quests already assigned to this user.
  const assigned = await DrizzleClient.query.userQuests.findMany({
    where: eq(userQuests.userId, BigInt(userId)),
    columns: { questId: true },
  });
  const assignedIds = assigned.map((row) => row.questId);

  // With nothing assigned there is no filter (notInArray rejects empty lists).
  return await DrizzleClient.query.quests.findMany({
    where: (questTable, { notInArray }) => {
      if (assignedIds.length === 0) return undefined;
      return notInArray(questTable.id, assignedIds);
    },
  });
},
/**
 * Insert a new quest definition.
 * Returns the inserted row(s) from the database.
 */
async createQuest(data: {
  name: string;
  description: string;
  triggerEvent: string;
  requirements: { target: number };
  rewards: { xp: number; balance: number };
}, tx?: Transaction) {
  return await withTransaction(async (trx) => {
    const { name, description, triggerEvent, requirements, rewards } = data;
    return await trx
      .insert(quests)
      .values({ name, description, triggerEvent, requirements, rewards })
      .returning();
  }, tx);
},
/** Fetch every quest definition, ordered by ascending id. */
async getAllQuests() {
  return await DrizzleClient.query.quests.findMany({
    orderBy: (questTable, { asc }) => [asc(questTable.id)],
  });
},
/** Delete a quest definition by id; returns the deleted row(s). */
async deleteQuest(id: number, tx?: Transaction) {
  return await withTransaction(
    async (trx) => trx.delete(quests).where(eq(quests.id, id)).returning(),
    tx
  );
},
/**
 * Partially update a quest definition.
 * Only fields explicitly provided (not undefined) are written.
 * Returns the updated row(s).
 */
async updateQuest(id: number, data: {
  name?: string;
  description?: string;
  triggerEvent?: string;
  requirements?: { target?: number };
  rewards?: { xp?: number; balance?: number };
}, tx?: Transaction) {
  return await withTransaction(async (trx) => {
    // Build the patch explicitly so undefined fields are never written.
    const patch: typeof data = {};
    if (data.name !== undefined) patch.name = data.name;
    if (data.description !== undefined) patch.description = data.description;
    if (data.triggerEvent !== undefined) patch.triggerEvent = data.triggerEvent;
    if (data.requirements !== undefined) patch.requirements = data.requirements;
    if (data.rewards !== undefined) patch.rewards = data.rewards;
    return await trx
      .update(quests)
      .set(patch)
      .where(eq(quests.id, id))
      .returning();
  }, tx);
}
};

View File

@@ -0,0 +1,98 @@
#!/bin/bash
# Cleanup script for Docker resources
# Use: ./shared/scripts/docker-cleanup.sh
# Use: ./shared/scripts/docker-cleanup.sh --full (for aggressive cleanup)
#
# Without --full the script runs interactively, asking y/N before each
# destructive step. Commands whose failure is acceptable are suffixed with
# `|| true` because set -e aborts on any other non-zero exit.
# NOTE(review): under set -e, `read` hitting EOF (non-interactive stdin)
# aborts the script — run the interactive mode from a terminal.
set -e
echo "🧹 Aurora Docker Cleanup"
echo "========================"
echo ""
# Show current disk usage first
echo "📊 Current Docker disk usage:"
docker system df
echo ""
# Stop running containers for this project
echo "📦 Stopping Aurora containers..."
docker compose down 2>/dev/null || true
# Remove dangling images (untagged images from failed builds)
echo ""
echo "🗑️  Removing dangling images..."
docker image prune -f
# Check for --full flag for aggressive cleanup
if [[ "$1" == "--full" ]]; then
  echo ""
  echo "🔥 Full cleanup mode - removing all unused Docker resources..."
  # Remove all unused images, not just dangling ones
  echo "  → Removing unused images..."
  docker image prune -a -f
  # Remove build cache
  echo "  → Removing build cache..."
  docker builder prune -a -f
  # Remove unused volumes (except named ones we need)
  echo "  → Removing unused volumes..."
  docker volume prune -f
  # Remove unused networks
  echo "  → Removing unused networks..."
  docker network prune -f
  # Remove node_modules volumes
  # (volume names follow the compose project prefix "aurora_")
  echo "  → Removing node_modules volumes..."
  docker volume rm aurora_app_node_modules aurora_web_node_modules 2>/dev/null || true
  echo ""
  echo "✅ Full cleanup complete!"
else
  # Interactive mode — each destructive step is individually confirmed.
  # `read -n 1 -r` takes a single raw keystroke; the bare `echo` after it
  # moves to a new line since -n suppresses the newline.
  echo ""
  read -p "🔧 Remove Docker build cache? (y/N): " -n 1 -r
  echo
  if [[ $REPLY =~ ^[Yy]$ ]]; then
    docker builder prune -f
    echo "✓ Build cache cleared"
  fi
  echo ""
  read -p "🖼️  Remove ALL unused images (not just dangling)? (y/N): " -n 1 -r
  echo
  if [[ $REPLY =~ ^[Yy]$ ]]; then
    docker image prune -a -f
    echo "✓ Unused images removed"
  fi
  echo ""
  read -p "📁 Remove node_modules volumes? (forces fresh install) (y/N): " -n 1 -r
  echo
  if [[ $REPLY =~ ^[Yy]$ ]]; then
    docker volume rm aurora_app_node_modules aurora_web_node_modules 2>/dev/null || true
    echo "✓ Node modules volumes removed"
  fi
  echo ""
  read -p "🧨 Run full system prune (removes ALL unused data)? (y/N): " -n 1 -r
  echo
  if [[ $REPLY =~ ^[Yy]$ ]]; then
    docker system prune -a -f --volumes
    echo "✓ Full system prune complete"
  fi
  echo ""
  echo "✅ Cleanup complete!"
fi
echo ""
echo "📊 Docker disk usage after cleanup:"
docker system df
echo ""
echo "💡 Tip: Check container logs with: sudo du -sh /var/lib/docker/containers/*/*.log"
echo "💡 Tip: Truncate logs with: sudo truncate -s 0 /var/lib/docker/containers/*/*.log"
echo ""
echo "Run 'docker compose up --build' to rebuild"