refactor(page): migrate to new modular architecture

This commit is contained in:
syntaxbullet
2026-02-09 22:34:14 +01:00
parent 9b9976c70a
commit a137a98377
2 changed files with 102 additions and 1251 deletions

View File

@@ -4,13 +4,14 @@ import TuiSlider from "../components/TuiSlider.astro";
import TuiSegment from "../components/TuiSegment.astro";
import TuiToggle from "../components/TuiToggle.astro";
import TuiButton from "../components/TuiButton.astro";
import Tooltip from "../components/Tooltip.astro";
---
<Layout title="Neko ASCII Auto-Generator">
<Layout title="Syntaxbullet - Digital Wizard">
<div class="hero-wrapper">
<!-- Background Layer: ASCII Art -->
<div class="ascii-layer">
<div id="loading">GENERATING...</div>
<div id="loading">Loading...</div>
<pre id="ascii-result">Preparing art...</pre>
<canvas id="ascii-canvas"></canvas>
</div>
@@ -20,9 +21,10 @@ import TuiButton from "../components/TuiButton.astro";
<div class="max-w-container">
<main class="hero-content">
<div class="hero-text">
<h2>AUTOMATED<br />ASCII<br />SYNTHESIS</h2>
<h2>SYNTAXBULLET</h2>
<p class="tagline">
Real-time image-to-text conversion engine.
Self-taught Munich-based software engineer
passionate about Generative AI, Linux, and the Web.
</p>
</div>
</main>
@@ -38,54 +40,70 @@ import TuiButton from "../components/TuiButton.astro";
label="EXP"
min={0}
max={3}
step={0.1}
step={0.01}
value={1.0}
title="Exposure / Brightness"
description="Adjusts the overall brightness level of the input image before processing."
/>
<TuiSlider
id="contrast"
label="CON"
min={0}
max={3}
step={0.1}
step={0.01}
value={1.0}
title="Contrast"
description="Increases or decreases the difference between light and dark areas."
/>
<TuiSlider
id="saturation"
label="SAT"
min={0}
max={3}
step={0.1}
step={0.01}
value={1.2}
title="Saturation"
description="Controls color intensity. Higher values make colors more vibrant in Color Mode."
/>
<TuiSlider
id="gamma"
label="GAM"
min={0}
max={3}
step={0.1}
step={0.01}
value={1.0}
title="Gamma Correction"
description="Non-linear brightness adjustment. useful for correcting washed-out or too dark images."
/>
<TuiSlider
id="overlayStrength"
label="OVL"
min={0}
max={1}
step={0.1}
step={0.01}
value={0.3}
title="Overlay Blend Strength"
description="Blends the original image over the ASCII output. 0 is pure ASCII, 1 is original image."
/>
<TuiSlider
id="resolution"
label="RES"
min={0.5}
min={0.1}
max={2}
step={0.1}
step={0.01}
value={1.0}
title="Resolution Scale"
description="Adjusts the density of characters. Higher values give more detail but may reduce performance."
/>
<TuiSlider
id="dither"
label="DTH"
min={0}
max={1}
step={0.01}
value={0}
title="Dither Strength"
description="Applies ordered dithering to simulate shading. Useful for low-contrast areas."
/>
</div>
</div>
@@ -101,21 +119,14 @@ import TuiButton from "../components/TuiButton.astro";
id="toggle-color"
label="CLR"
title="Color Output (HTML)"
description="Toggles between monochrome text and colored HTML spans."
/>
<TuiToggle
id="toggle-dither"
label="DTH"
title="Floyd-Steinberg Dithering"
/>
<TuiToggle
id="toggle-denoise"
label="DNZ"
title="Denoise Pre-processing"
/>
<TuiToggle
id="toggle-edges"
label="EDG"
title="Edge Enhancement"
description="Applies a bilateral filter to reduce image noise while preserving edges."
/>
</div>
@@ -132,6 +143,15 @@ import TuiButton from "../components/TuiButton.astro";
options={["AUTO", "ON", "OFF"]}
value="AUTO"
title="Invert Colors"
description="Inverts brightness mapping. AUTO detects dark/light mode."
/>
<TuiSegment
id="segment-edge"
label="EDG"
options={["OFF", "SPL", "SOB", "CNY"]}
value="OFF"
title="Edge Detection Mode"
description="Algorithm used to detect edges. SPL: Simple, SOB: Sobel, CNY: Canny."
/>
<TuiSegment
id="segment-charset"
@@ -146,6 +166,7 @@ import TuiButton from "../components/TuiButton.astro";
]}
value="STD"
title="Character Set"
description="The set of characters used for mapping brightness levels."
/>
</div>
</div>
@@ -162,17 +183,21 @@ import TuiButton from "../components/TuiButton.astro";
label="RESET"
shortcut="R"
title="Reset to Auto-detected Settings"
description="Resets all sliders and toggles to their default values."
/>
<TuiButton
id="btn-next"
label="NEXT"
shortcut="N"
variant="primary"
title="Load Next Image"
description="Discards current image and loads a new one from the queue."
/>
<div
class="queue-display"
title="Buffered Images"
data-tooltip-title="Buffered Images"
data-tooltip-desc="Number of images pre-loaded in background queue."
>
<span class="queue-label">Q:</span>
<span id="val-queue" class="queue-value"
@@ -199,76 +224,25 @@ import TuiButton from "../components/TuiButton.astro";
</div>
<script>
import {
AsciiGenerator,
autoTuneImage,
CHAR_SETS,
} from "../scripts/ascii.js";
import {
fetchRandomAnimeImage,
loadSingleImage,
} from "../scripts/anime-api.js";
import { WebGLAsciiRenderer } from "../scripts/webgl-ascii.js";
import { AsciiController } from "../scripts/ascii-controller";
import { ImageQueue } from "../scripts/image-queue";
import { UIBindings } from "../scripts/ui-bindings";
const generator = new AsciiGenerator();
// ============= Global Cleanup Protocol =============
// Fix for accumulating event listeners and render loops during HMR/Navigation
if (window.__ASCII_APP__) {
console.log("♻️ Disposing previous application instance...");
try {
window.__ASCII_APP__.dispose();
} catch (e) {
console.error("Failed to dispose previous instance:", e);
}
}
// ============= DOM Elements =============
const canvas = document.getElementById(
"ascii-canvas",
) as HTMLCanvasElement;
let webglRenderer: WebGLAsciiRenderer | null = null;
let isWebGLAvailable = false;
try {
webglRenderer = new WebGLAsciiRenderer(canvas);
isWebGLAvailable = true;
} catch (e) {
console.warn(
"WebGL renderer failed to initialize, falling back to CPU",
e,
);
isWebGLAvailable = false;
}
// State
let currentImgUrl: string | null = null;
let currentSettings: Record<string, any> = {
exposure: 1.0,
contrast: 1.0,
saturation: 1.2,
gamma: 1.0,
invert: false,
color: false,
dither: false,
denoise: false,
enhanceEdges: false,
overlayStrength: 0.3,
resolution: 1.0,
charSet: "standard",
};
let invertMode = "auto"; // 'auto', 'on', 'off'
let detectedInvert = false;
let detectedSettings: any = {}; // Store auto-detected settings
// Render Loop State
let dirtyTexture = false;
let dirtyGrid = false;
let dirtyUniforms = false;
// Cache for grid calculations
let cachedGrid: {
widthCols: number;
heightRows: number;
imgEl: HTMLImageElement | null;
} = {
widthCols: 0,
heightRows: 0,
imgEl: null,
};
// Debounce for CPU render
let cpuRenderTimeout: number | undefined;
// DOM Elements
const asciiResult = document.getElementById(
"ascii-result",
) as HTMLPreElement;
@@ -276,464 +250,57 @@ import TuiButton from "../components/TuiButton.astro";
"loading",
) as HTMLDivElement;
if (!asciiResult || !loadingIndicator || !canvas) {
if (!canvas || !asciiResult || !loadingIndicator) {
throw new Error("Critical UI elements missing");
}
// Charset key mapping (short to full)
// Translates the 3-letter labels shown on the TUI charset segment
// into the full charset identifiers understood by the generator.
const charSetKeyMap: Record<string, string> = Object.fromEntries([
  ["STD", "standard"],
  ["EXT", "extended"],
  ["BLK", "blocks"],
  ["MIN", "minimal"],
  ["DOT", "dots"],
  ["SHP", "shapes"],
]);
const charSetReverseMap: Record<string, string> = Object.fromEntries(
Object.entries(charSetKeyMap).map(([k, v]) => [v, k]),
// ============= Initialize =============
const controller = new AsciiController(
canvas,
asciiResult,
loadingIndicator,
);
const queue = new ImageQueue(2);
const ui = new UIBindings(controller, queue, loadNewImage);
// Update UI to reflect current settings using new components
function updateUI() {
// Update sliders
const sliderIds = [
"exposure",
"contrast",
"saturation",
"gamma",
"overlayStrength",
"resolution",
];
sliderIds.forEach((id) => {
const input = document.getElementById(id) as HTMLInputElement;
// removed unused valueDisplay
if (input && currentSettings[id] !== undefined) {
input.value = String(currentSettings[id]);
input.dispatchEvent(new Event("input"));
}
});
// Store instances globally for cleanup
window.__ASCII_APP__ = {
controller,
queue,
ui,
dispose: () => {
controller.dispose();
ui.dispose();
queue.dispose();
window.__ASCII_APP__ = undefined;
},
};
// Update toggles
(window as any).updateToggleState?.(
"toggle-color",
currentSettings.color,
);
(window as any).updateToggleState?.(
"toggle-dither",
currentSettings.dither,
);
(window as any).updateToggleState?.(
"toggle-denoise",
currentSettings.denoise,
);
(window as any).updateToggleState?.(
"toggle-edges",
currentSettings.enhanceEdges,
);
// Update segments
const invertValue =
invertMode === "auto"
? "AUTO"
: currentSettings.invert
? "ON"
: "OFF";
(window as any).updateSegmentValue?.("segment-invert", invertValue);
const charSetShort =
charSetReverseMap[currentSettings.charSet] || "STD";
(window as any).updateSegmentValue?.(
"segment-charset",
charSetShort,
);
// Update queue status
updateQueueStatus();
}
/**
 * Mirrors the current prefetch-queue length into the "Q:" readout.
 * Silently does nothing when the element is not in the DOM.
 */
function updateQueueStatus() {
  const display = document.getElementById("val-queue");
  if (!display) return;
  display.textContent = String(imageQueue.length);
}
// Restores every control to the values auto-detected for the current
// image (stored in `detectedSettings`); a no-op before the first image
// has been analyzed.
function resetToAutoSettings() {
  if (Object.keys(detectedSettings).length > 0) {
    invertMode = "auto";
    detectedInvert = detectedSettings.invert ?? false;
    currentSettings = {
      ...currentSettings,
      ...detectedSettings,
      resolution: currentSettings.resolution, // Keep user-chosen resolution
      color: false, // Reset color to off
    };
    currentSettings.invert = detectedInvert;
    updateUI();
    // Full update: the grid may change size, so recalc then repaint everything.
    calculateGrid().then(() => {
      requestRender("all");
    });
  }
}
// Marks parts of the WebGL pipeline dirty; renderLoop() picks the flags
// up on the next animation frame. The CPU fallback has no partial
// updates, so there a full re-render is debounced instead.
function requestRender(type: "texture" | "grid" | "uniforms" | "all") {
  if (!isWebGLAvailable) {
    // For CPU, we just debounce a full render
    clearTimeout(cpuRenderTimeout);
    cpuRenderTimeout = window.setTimeout(() => generateCPU(), 50);
    return;
  }
  if (type === "all") {
    dirtyTexture = true;
    dirtyGrid = true;
    dirtyUniforms = true;
  } else if (type === "texture") dirtyTexture = true;
  else if (type === "grid") dirtyGrid = true;
  else if (type === "uniforms") dirtyUniforms = true;
}
// Computes the character-grid dimensions for the current image so the
// art fits the viewport, caching the result (plus the decoded image
// element) in `cachedGrid` for the render loop. Returns undefined when
// no image URL has been set yet.
async function calculateGrid() {
  if (!currentImgUrl) return;
  // Dynamic sizing logic to fit screen
  const fontAspectRatio = 0.55; // monospace glyphs are taller than wide
  const marginRatio = 0.2;
  const screenW = window.innerWidth;
  // Available space
  const availW = screenW * (1 - marginRatio);
  let widthCols = Math.floor(availW / 6); // Assuming ~6px char width
  // Apply resolution scaling
  widthCols = Math.floor(widthCols * currentSettings.resolution);
  if (widthCols > 300) widthCols = 300; // Cap to prevent crashing
  if (widthCols < 40) widthCols = 40;
  const imgEl = await resolveImage(currentImgUrl);
  const imgRatio = imgEl.width / imgEl.height;
  // Row count follows the image aspect, corrected for glyph aspect.
  const heightRows = widthCols / (imgRatio / fontAspectRatio);
  cachedGrid = {
    widthCols,
    heightRows,
    imgEl,
  };
  return cachedGrid;
}
// Per-frame WebGL driver: it only touches the GPU when one of the dirty
// flags raised by requestRender() is set, then clears those flags.
function renderLoop() {
  if (isWebGLAvailable && webglRenderer && cachedGrid.imgEl) {
    const charSetContent =
      CHAR_SETS[
        currentSettings.charSet as keyof typeof CHAR_SETS
      ] || CHAR_SETS.standard;
    // Only act if dirty
    if (dirtyTexture || dirtyGrid || dirtyUniforms) {
      if (dirtyTexture) {
        webglRenderer.updateTexture(cachedGrid.imgEl);
      }
      if (dirtyGrid) {
        // Recalculate canvas size for WebGL
        const fontAspectRatio = 0.55;
        const gridAspect =
          (cachedGrid.widthCols * fontAspectRatio) /
          cachedGrid.heightRows;
        const screenW = window.innerWidth;
        const screenH = window.innerHeight;
        const maxW = screenW * 0.95;
        const maxH = screenH * 0.95;
        let finalW, finalH;
        // Letterbox: fit the grid's aspect inside 95% of the viewport.
        if (gridAspect > maxW / maxH) {
          finalW = maxW;
          finalH = maxW / gridAspect;
        } else {
          finalH = maxH;
          finalW = maxH * gridAspect;
        }
        canvas.style.width = `${finalW}px`;
        canvas.style.height = `${finalH}px`;
        // Back the CSS size with device pixels so HiDPI stays crisp.
        const dpr = window.devicePixelRatio || 1;
        canvas.width = finalW * dpr;
        canvas.height = finalH * dpr;
        webglRenderer.updateGrid(
          cachedGrid.widthCols,
          Math.floor(cachedGrid.heightRows),
        );
      }
      if (dirtyUniforms || dirtyGrid) {
        // Uniforms often depend on grid/atlas state
        webglRenderer.updateUniforms({
          width: cachedGrid.widthCols,
          height: Math.floor(cachedGrid.heightRows),
          charSetContent: charSetContent,
          ...currentSettings,
          dither: currentSettings.dither,
          denoise: currentSettings.denoise,
          // WebGLAsciiRenderer handles Defaults for zoom/magnifier if undefined
          zoom: zoom,
          zoomCenter: zoomCenter,
          mousePos: mousePos,
          showMagnifier: showMagnifier,
          magnifierRadius: 0.15,
          magnifierZoom: 2.5,
        } as any);
      }
      webglRenderer.draw();
      dirtyTexture = false;
      dirtyGrid = false;
      dirtyUniforms = false;
    }
  }
  requestAnimationFrame(renderLoop);
}
// Start the loop
requestAnimationFrame(renderLoop);
// CPU fallback renderer: swaps the <pre> in for the canvas, runs the JS
// generator over the cached grid, then auto-fits the font size so the
// whole piece fits ~90% of the viewport.
async function generateCPU() {
  if (!cachedGrid.imgEl) await calculateGrid();
  if (!cachedGrid.imgEl) return; // still no image — nothing to draw
  canvas.style.display = "none";
  asciiResult.style.display = "block";
  try {
    const result = await generator.generate(cachedGrid.imgEl, {
      width: cachedGrid.widthCols,
      height: Math.floor(cachedGrid.heightRows),
      ...currentSettings,
    });
    // Handle color output (returns object) vs plain text (returns string)
    if (typeof result === "object" && result.isHtml) {
      asciiResult.innerHTML = result.output;
    } else {
      asciiResult.textContent = result as string;
    }
    // Auto-fit font size: pick the largest size that fits both axes.
    const fontAspectRatio = 0.55;
    const screenW = window.innerWidth;
    const screenH = window.innerHeight;
    const sizeW =
      (screenW * 0.9) / (cachedGrid.widthCols * fontAspectRatio);
    const sizeH = (screenH * 0.9) / cachedGrid.heightRows;
    const bestSize = Math.min(sizeW, sizeH);
    asciiResult.style.fontSize = `${Math.max(4, bestSize).toFixed(2)}px`;
    asciiResult.style.opacity = "1";
  } catch (e) {
    console.error("Render error", e);
  }
}
/**
 * Legacy wrapper that kicks off a full render (used by the resize
 * listener and initial load). Takes the WebGL path when available,
 * otherwise falls back to the CPU generator.
 */
async function generate() {
  await calculateGrid();
  if (!isWebGLAvailable) {
    generateCPU();
    return;
  }
  // Show the canvas layer and hide the <pre> fallback, then repaint all.
  asciiResult.style.display = "none";
  canvas.style.display = "block";
  canvas.style.opacity = "1";
  requestRender("all");
}
// Zoom & Magnifier State
let zoom = 1.0; // 1.0 = no zoom; clamped to [1, 10] below
let zoomCenter = { x: 0.5, y: 0.5 }; // normalized point held fixed while zooming
let mousePos = { x: -1, y: -1 }; // normalized canvas coords; -1 = off-canvas
let showMagnifier = false;
const heroWrapper = document.querySelector(".hero-wrapper");
if (heroWrapper) {
  // Wheel = zoom toward the cursor (WebGL path only).
  heroWrapper.addEventListener(
    "wheel",
    (e: any) => {
      // If over controls, don't zoom
      if (e.target.closest("#tui-controls")) return;
      // Only zoom if using WebGL (as CPU version doesn't support it yet)
      if (webglRenderer) {
        e.preventDefault();
        const delta = -e.deltaY;
        const factor = delta > 0 ? 1.1 : 0.9;
        const oldZoom = zoom;
        zoom *= factor;
        // Cap zoom
        zoom = Math.min(Math.max(zoom, 1.0), 10.0);
        if (zoom === 1.0) {
          zoomCenter = { x: 0.5, y: 0.5 };
        } else if (oldZoom !== zoom) {
          // Calculate where the mouse is relative to the canvas
          const rect = canvas.getBoundingClientRect();
          const mx = (e.clientX - rect.left) / rect.width;
          const my = (e.clientY - rect.top) / rect.height;
          // To zoom into the mouse, we want the image coordinate under the mouse to stay fixed.
          // Shader formula: uv = (v_texCoord - C) / Z + C
          // We want: (mx - C1) / Z1 + C1 == (mx - C2) / Z2 + C2
          const imgX =
            (mx - zoomCenter.x) / oldZoom + zoomCenter.x;
          const imgY =
            (my - zoomCenter.y) / oldZoom + zoomCenter.y;
          // Solve for C2: K = (mx - C2) / Z2 + C2 => C2 = (K - mx/Z2) / (1 - 1/Z2)
          zoomCenter.x = (imgX - mx / zoom) / (1 - 1 / zoom);
          zoomCenter.y = (imgY - my / zoom) / (1 - 1 / zoom);
        }
        requestRender("uniforms");
      }
    },
    { passive: false }, // passive:false so preventDefault() can stop page scroll
  );
  // NOTE(review): declared but never assigned in this block — confirm dead.
  let magnifierTimeout: ReturnType<typeof setTimeout> | undefined;
  // Track the cursor and toggle the magnifier while it is over the canvas.
  heroWrapper.addEventListener("mousemove", (e: any) => {
    if (webglRenderer) {
      const rect = canvas.getBoundingClientRect();
      const mx = (e.clientX - rect.left) / rect.width;
      const my = (e.clientY - rect.top) / rect.height;
      mousePos = { x: mx, y: my };
      // Show magnifier if mouse is over canvas
      const wasShowing = showMagnifier;
      showMagnifier = mx >= 0 && mx <= 1 && my >= 0 && my <= 1;
      if (showMagnifier || wasShowing) {
        requestRender("uniforms");
      }
    }
  });
  heroWrapper.addEventListener("mouseleave", () => {
    if (showMagnifier) {
      showMagnifier = false;
      requestRender("uniforms");
    }
  });
}
// Queue System
// Pre-fetched images waiting to be shown; refilled in the background
// so pressing "NEXT" feels instant.
const imageQueue: { data: any; imgElement: HTMLImageElement }[] = [];
let isFetchingNext = false;
const MAX_QUEUE_SIZE = 2;
// Fetches one image into the queue, unless a fetch is already running,
// the queue is full, or the tab is hidden.
async function prefetchNext() {
  if (isFetchingNext || imageQueue.length >= MAX_QUEUE_SIZE) return;
  if (document.hidden) return;
  isFetchingNext = true;
  try {
    const data = await fetchRandomAnimeImage();
    loadingIndicator.style.display = "block";
    asciiResult.textContent = `FETCHING... (${imageQueue.length + 1}/${MAX_QUEUE_SIZE})`;
    asciiResult.style.opacity = "0.5";
    const img = await loadSingleImage(data.url);
    imageQueue.push({ data, imgElement: img });
    updateQueueStatus();
    loadingIndicator.style.display = "none";
  } catch (e) {
    console.error("Failed to prefetch image:", e);
    loadingIndicator.style.display = "none";
  } finally {
    isFetchingNext = false;
  }
}
// Keeps prefetching (with a short pause between attempts) until the
// queue is full.
async function ensureQueueFilled() {
  while (imageQueue.length < MAX_QUEUE_SIZE) {
    await prefetchNext();
    await new Promise((resolve) => setTimeout(resolve, 500));
  }
}
// Link settings updates to UI sync
controller.onSettingsChanged(() => ui.updateUI());
let retryCount = 0;
const MAX_RETRIES = 3;
async function loadNewImage() {
// ============= Image Loading =============
async function loadNewImage(): Promise<void> {
try {
let suggestions: any;
let item;
// If queue is empty, show loading and wait for fetch
if (imageQueue.length === 0) {
loadingIndicator.style.display = "block";
asciiResult.textContent = "FETCHING...";
asciiResult.style.opacity = "0.5";
const data = await fetchRandomAnimeImage();
const img = await loadSingleImage(data.url);
currentImgUrl = data.url;
suggestions = autoTuneImage(img, data.meta);
loadingIndicator.style.display = "none";
if (queue.getLength() === 0) {
controller.showLoading("FETCHING...");
item = await queue.fetchDirect();
} else {
// Pop from queue
const nextItem = imageQueue.shift()!;
currentImgUrl = nextItem.data.url;
suggestions = autoTuneImage(
nextItem.imgElement as HTMLImageElement,
nextItem.data.meta,
);
// Trigger refill in background
ensureQueueFilled();
item = queue.pop()!;
queue.ensureFilled(); // Background refill
}
// Reset zoom on new image
zoom = 1.0;
zoomCenter = { x: 0.5, y: 0.5 };
controller.setCurrentImage(item.url, item.suggestions);
retryCount = 0;
// Reset auto mode and apply auto-detected settings
invertMode = "auto";
detectedInvert = suggestions.invert;
detectedSettings = suggestions;
currentSettings = {
...currentSettings,
...suggestions,
// Keep resolution as is
resolution: currentSettings.resolution,
// Keep manual toggles if they were set
color: currentSettings.color,
};
currentSettings.invert = detectedInvert;
retryCount = 0; // Reset retries on success
updateUI();
await generate();
loadingIndicator.style.display = "none";
asciiResult.style.opacity = "1";
ui.updateUI();
await controller.generate();
controller.hideLoading();
} catch (e) {
console.error(e);
if (retryCount < MAX_RETRIES) {
@@ -742,188 +309,18 @@ import TuiButton from "../components/TuiButton.astro";
setTimeout(loadNewImage, 2000);
} else {
asciiResult.textContent = "SIGNAL LOST. PLEASE REFRESH.";
loadingIndicator.style.display = "none";
controller.hideLoading();
}
}
}
/**
 * Loads `src` into an HTMLImageElement, resolving once it has loaded.
 * CORS is requested so the pixels can later be read back by canvas /
 * WebGL without tainting.
 */
function resolveImage(src: string): Promise<HTMLImageElement> {
  return new Promise<HTMLImageElement>((resolve, reject) => {
    const img = new Image();
    img.crossOrigin = "Anonymous";
    img.onload = () => resolve(img);
    img.onerror = reject;
    img.src = src;
  });
}
// ============= NEW COMPONENT EVENT LISTENERS =============
// Slider change events
// Each slider's element id doubles as its key in `currentSettings`.
const sliderIds = [
  "exposure",
  "contrast",
  "saturation",
  "gamma",
  "overlayStrength",
  "resolution",
];
sliderIds.forEach((id) => {
  const input = document.getElementById(id) as HTMLInputElement;
  if (input) {
    input.addEventListener("input", () => {
      currentSettings[id] = parseFloat(input.value);
      if (id === "resolution") {
        // Resolution changes the grid size, not just shader uniforms.
        calculateGrid().then(() => requestRender("grid"));
      } else {
        requestRender("uniforms");
      }
    });
  }
});
// Toggle change events - use event delegation to catch events from dynamically initialized toggles
document.body.addEventListener("toggle-change", (e: any) => {
  const target = e.target as HTMLElement;
  if (!target) return;
  const toggleId = target.id;
  const checked = e.detail?.checked;
  // Each toggle maps onto one boolean setting; all of them only affect
  // shader uniforms, so a uniforms-only repaint suffices.
  switch (toggleId) {
    case "toggle-color":
      currentSettings.color = checked;
      requestRender("uniforms");
      break;
    case "toggle-dither":
      currentSettings.dither = checked;
      requestRender("uniforms");
      break;
    case "toggle-denoise":
      currentSettings.denoise = checked;
      requestRender("uniforms");
      break;
    case "toggle-edges":
      currentSettings.enhanceEdges = checked;
      requestRender("uniforms");
      break;
  }
});
// Segment change events
// Invert: AUTO defers to the per-image detected value; ON/OFF force it.
document
  .getElementById("segment-invert")
  ?.addEventListener("segment-change", (e: any) => {
    const value = e.detail.value;
    if (value === "AUTO") {
      invertMode = "auto";
      currentSettings.invert = detectedInvert;
    } else if (value === "ON") {
      invertMode = "on";
      currentSettings.invert = true;
    } else {
      invertMode = "off";
      currentSettings.invert = false;
    }
    requestRender("uniforms");
  });
// Charset: translate the short TUI label back to the full charset key.
document
  .getElementById("segment-charset")
  ?.addEventListener("segment-change", (e: any) => {
    const shortKey = e.detail.value;
    currentSettings.charSet = charSetKeyMap[shortKey] || "standard";
    requestRender("uniforms"); // Charset update uses updateUniforms -> updateAtlas
  });
// Action button events
// stopPropagation keeps the clicks from bubbling to global handlers.
document.getElementById("btn-reset")?.addEventListener("click", (e) => {
  e.stopPropagation();
  resetToAutoSettings();
});
document.getElementById("btn-next")?.addEventListener("click", (e) => {
  e.stopPropagation();
  loadNewImage();
});
// Keyboard shortcuts
// N=next, R=reset, I=cycle invert, C=color, D=dither, E=edges, S=charset.
document.addEventListener("keydown", (e: KeyboardEvent) => {
  // Ignore if user is typing in an input
  if (
    e.target instanceof HTMLInputElement ||
    e.target instanceof HTMLTextAreaElement
  )
    return;
  switch (e.key.toLowerCase()) {
    case "n": // Next image
      loadNewImage();
      break;
    case "r": // Reset
      resetToAutoSettings();
      break;
    case "i": // Cycle invert (AUTO -> ON -> OFF -> AUTO)
      {
        // removed unused invertSegment declaration
        if (invertMode === "auto") {
          invertMode = "on";
          currentSettings.invert = true;
        } else if (invertMode === "on") {
          invertMode = "off";
          currentSettings.invert = false;
        } else {
          // back to AUTO: restore the per-image detected value
          invertMode = "auto";
          currentSettings.invert = detectedInvert;
        }
        updateUI();
        requestRender("uniforms");
      }
      break;
    case "c": // Toggle color
      currentSettings.color = !currentSettings.color;
      updateUI();
      requestRender("uniforms");
      break;
    case "d": // Toggle dither
      currentSettings.dither = !currentSettings.dither;
      updateUI();
      requestRender("uniforms");
      break;
    case "e": // Toggle edges
      currentSettings.enhanceEdges =
        !currentSettings.enhanceEdges;
      updateUI();
      requestRender("uniforms");
      break;
    case "s": // Cycle charset
      {
        const keys = Object.keys(CHAR_SETS);
        const idx = keys.indexOf(currentSettings.charSet);
        const nextIdx = (idx + 1) % keys.length; // wraps around
        currentSettings.charSet = keys[nextIdx];
        updateUI();
        requestRender("uniforms");
      }
      break;
  }
});
// Resize handler
// Debounced so a drag-resize doesn't trigger a regenerate per pixel.
let resizeTimeout: ReturnType<typeof setTimeout> | undefined;
window.addEventListener("resize", () => {
  clearTimeout(resizeTimeout);
  resizeTimeout = setTimeout(generate, 200);
});
// Periodic queue status update
setInterval(updateQueueStatus, 1000);
// Init
// ============= Initialize UI and Load First Image =============
ui.init();
loadNewImage().then(() => {
ensureQueueFilled(); // Start filling queue after first load
queue.ensureFilled();
});
</script>
<Tooltip />
</Layout>
<style>

View File

@@ -1,546 +0,0 @@
/** Tunable options for a single ASCII conversion pass. */
export interface AsciiOptions {
  /** Output width in characters (default 100). */
  width?: number;
  /** Output height in rows; derived from the image aspect when omitted. */
  height?: number;
  /** Contrast multiplier applied via a canvas filter (default 1.0). */
  contrast?: number;
  /** Brightness multiplier applied via a canvas filter (default 1.0). */
  exposure?: number;
  /** Invert the luminance before mapping to characters. */
  invert?: boolean;
  /** Saturation multiplier applied via a canvas filter (default 1.2). */
  saturation?: number;
  /** Gamma exponent applied to luminance (default 1.0 = linear). */
  gamma?: number;
  /** Key into CHAR_SETS, or a literal ramp string indexed low→high luminance. */
  charSet?: CharSetKey | string;
  /** When true, emit colored HTML spans (AsciiResult) instead of plain text. */
  color?: boolean;
  /** Apply Floyd–Steinberg error diffusion to the luminance matrix. */
  dither?: boolean;
  /** Multiply a high-contrast copy over the image to accent edges. */
  enhanceEdges?: boolean;
  /** Stretch luminance to span the full 0–1 range (default true). */
  autoStretch?: boolean;
  /** 0–1 alpha for an 'overlay' composite of the image onto itself (default 0.3). */
  overlayStrength?: number;
  /** How to fit the source image into the character grid. */
  aspectMode?: 'fit' | 'fill' | 'stretch';
  /** Slightly blur the image first to suppress noise. */
  denoise?: boolean;
  /** Width/height ratio of a rendered glyph (default 0.55). */
  fontAspectRatio?: number;
  /** Progress callback, invoked with values from 0 to 100. */
  onProgress?: (progress: number) => void;
}
/** Structured result returned when color output is requested. */
export interface AsciiResult {
  /** The generated art — HTML markup when `isHtml` is true. */
  output: string;
  /** True when `output` contains <span> markup rather than plain text. */
  isHtml: boolean;
  /** Grid width in characters. */
  width: number;
  /** Grid height in rows. */
  height: number;
}
/** Names of the built-in character ramps in CHAR_SETS. */
export type CharSetKey = 'standard' | 'simple' | 'blocks' | 'minimal' | 'matrix' | 'dots' | 'ascii_extended';
/** Strategies for fitting the source image into the character grid. */
export type AspectMode = 'fit' | 'fill' | 'stretch';
// Built-in character ramps. Within each string, index 0 is the glyph
// chosen for the lowest clamped luminance and the last index for the
// highest (after any inversion / auto-stretch applied by generate()).
export const CHAR_SETS: Record<CharSetKey, string> = {
  standard: '@W%$NQ08GBR&ODHKUgSMw#Xbdp5q9C26APahk3EFVesm{}o4JZcjnuy[f1xi*7zYt(l/I\\v)T?]r><+^"L;|!~:,-_.\' ',
  simple: '@%#*+=-:. ',
  blocks: '█▓▒░ ', // Unicode block elements
  minimal: '#+-. ',
  matrix: 'ハミヒーウシナモニサワツオリアホテマケメエカキムユラセネスタヌヘ1234567890:.=*+-<>', // katakana "digital rain"
  dots: '⣿⣷⣯⣟⡿⢿⣻⣽⣾⣶⣦⣤⣄⣀⡀ ', // braille patterns
  ascii_extended: '░▒▓█▀▄▌▐│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌ ' // box-drawing glyphs
};
// Identity lookup so callers can validate/normalize aspect-mode strings.
export const ASPECT_MODES: Record<string, AspectMode> = {
  fit: 'fit',
  fill: 'fill',
  stretch: 'stretch'
};
// Optional metadata supplied alongside an image; declared for
// autoTuneImage's `meta` parameter.
interface ImageMetadata {
  // Dominant color as [r, g, b] — presumably 0–255; confirm at the source.
  color_dominant?: [number, number, number];
  // Palette of [r, g, b] triplets.
  color_palette?: [number, number, number][];
  has_fine_detail?: boolean;
}
/**
 * Converts images to ASCII art on the CPU using 2D canvases.
 *
 * The instance caches its scratch canvases between generate() calls so
 * repeated conversions avoid re-allocation; call dispose() to release
 * them when the generator is no longer needed.
 */
export class AsciiGenerator {
  // Scratch surfaces, lazily created on first use and reused afterwards.
  private ctx: CanvasRenderingContext2D | null = null;
  private canvas: HTMLCanvasElement | null = null;
  private sharpCanvas: HTMLCanvasElement | null = null;
  private sharpCtx: CanvasRenderingContext2D | null = null;
  private denoiseCanvas: HTMLCanvasElement | null = null;
  private denoiseCtx: CanvasRenderingContext2D | null = null;
  // Per-pixel RGB captured during generate() when color output is on.
  private colorData: Uint8Array | null = null;

  /** Releases all cached canvases and buffers (shrinks canvases to 0x0 first). */
  dispose(): void {
    this.ctx = null;
    this.sharpCtx = null;
    this.denoiseCtx = null;
    this.colorData = null;
    if (this.canvas) {
      this.canvas.width = 0;
      this.canvas.height = 0;
      this.canvas = null;
    }
    if (this.sharpCanvas) {
      this.sharpCanvas.width = 0;
      this.sharpCanvas.height = 0;
      this.sharpCanvas = null;
    }
    if (this.denoiseCanvas) {
      this.denoiseCanvas.width = 0;
      this.denoiseCanvas.height = 0;
      this.denoiseCanvas = null;
    }
  }

  /**
   * Converts `imageSource` (a URL or an already-loaded element) to ASCII.
   *
   * Returns a plain string, or an AsciiResult containing HTML spans when
   * `options.color` is set.
   *
   * @throws Error outside a browser environment (no `document`).
   */
  async generate(imageSource: string | HTMLImageElement, options: AsciiOptions = {}): Promise<string | AsciiResult> {
    if (typeof document === 'undefined') {
      throw new Error('AsciiGenerator requires a browser environment.');
    }
    const onProgress = options.onProgress ?? (() => { });
    onProgress(0);
    const img = await this.resolveImage(imageSource);
    onProgress(10);
    // ---- Grid sizing: rows follow the image aspect unless given ----
    const requestedWidth = options.width ?? 100;
    const fontAspectRatio = options.fontAspectRatio ?? 0.55;
    const imgRatio = this.getImageRatio(img);
    const aspectMode = options.aspectMode ?? 'fit';
    let width: number, height: number;
    if (aspectMode === 'stretch') {
      width = requestedWidth;
      height = options.height ?? Math.floor(requestedWidth / 2);
    } else if (aspectMode === 'fill') {
      width = requestedWidth;
      const naturalHeight = Math.floor(requestedWidth / (imgRatio / fontAspectRatio));
      height = options.height ?? naturalHeight;
    } else {
      width = requestedWidth;
      height = options.height ?? Math.floor(requestedWidth / (imgRatio / fontAspectRatio));
    }
    // Accept either a CHAR_SETS key or a literal ramp string.
    let charSet: string = options.charSet ?? 'standard';
    if (charSet in CHAR_SETS) {
      charSet = CHAR_SETS[charSet as CharSetKey];
    }
    // ---- Scratch canvas setup (one pixel per output character) ----
    if (!this.canvas) {
      this.canvas = document.createElement('canvas');
    }
    this.canvas.width = width;
    this.canvas.height = height;
    this.ctx = this.canvas.getContext('2d');
    if (!this.sharpCanvas) {
      this.sharpCanvas = document.createElement('canvas');
    }
    this.sharpCanvas.width = width;
    this.sharpCanvas.height = height;
    this.sharpCtx = this.sharpCanvas.getContext('2d');
    const exposure = options.exposure ?? 1.0;
    const contrast = options.contrast ?? 1.0;
    const saturation = options.saturation ?? 1.2;
    const gamma = options.gamma ?? 1.0;
    const dither = options.dither ?? false;
    const enhanceEdges = options.enhanceEdges ?? false;
    const autoStretch = options.autoStretch !== false;
    const overlayStrength = options.overlayStrength ?? 0.3;
    const denoise = options.denoise ?? false;
    const colorOutput = options.color ?? false;
    onProgress(20);
    // ---- Optional denoise: slight blur while downsampling ----
    let sourceImage: HTMLImageElement | HTMLCanvasElement = img;
    if (denoise) {
      if (!this.denoiseCanvas) {
        this.denoiseCanvas = document.createElement('canvas');
      }
      this.denoiseCanvas.width = width;
      this.denoiseCanvas.height = height;
      this.denoiseCtx = this.denoiseCanvas.getContext('2d');
      if (this.denoiseCtx) {
        this.denoiseCtx.filter = 'blur(0.5px)';
        this.denoiseCtx.drawImage(img, 0, 0, width, height);
        sourceImage = this.denoiseCanvas;
      }
    }
    // ---- 'fill' mode: center-crop the source to the target ratio ----
    let sx = 0, sy = 0, sw = img.width, sh = img.height;
    if (aspectMode === 'fill' && options.height) {
      const targetRatio = width / (options.height * fontAspectRatio);
      if (imgRatio > targetRatio) {
        // Image is wider than target: crop left/right.
        sw = img.height * targetRatio;
        sx = (img.width - sw) / 2;
      } else {
        // Image is taller than target: crop top/bottom.
        sh = img.width / targetRatio;
        sy = (img.height - sh) / 2;
      }
    }
    // ---- Tone adjustments via CSS canvas filters ----
    if (this.sharpCtx) {
      this.sharpCtx.filter = `brightness(${exposure}) contrast(${contrast}) saturate(${saturation})`;
      if (denoise && sourceImage === this.denoiseCanvas) {
        this.sharpCtx.drawImage(sourceImage, 0, 0, width, height);
      } else {
        this.sharpCtx.drawImage(img, sx, sy, sw, sh, 0, 0, width, height);
      }
    }
    // ---- Optional edge accent: multiply a high-contrast copy on top ----
    if (enhanceEdges && this.sharpCtx) {
      this.sharpCtx.filter = 'none';
      this.sharpCtx.globalCompositeOperation = 'source-over';
      const edgeCanvas = document.createElement('canvas');
      edgeCanvas.width = width;
      edgeCanvas.height = height;
      const edgeCtx = edgeCanvas.getContext('2d');
      if (edgeCtx) {
        edgeCtx.filter = 'contrast(2) brightness(0.8)';
        edgeCtx.drawImage(this.sharpCanvas!, 0, 0);
        this.sharpCtx.globalAlpha = 0.4;
        this.sharpCtx.globalCompositeOperation = 'multiply';
        this.sharpCtx.drawImage(edgeCanvas, 0, 0);
        // Restore default compositing state for later draws.
        this.sharpCtx.globalCompositeOperation = 'source-over';
        this.sharpCtx.globalAlpha = 1.0;
      }
    }
    onProgress(40);
    // ---- Composite onto the read-back canvas (+ optional overlay) ----
    if (this.ctx && this.sharpCanvas) {
      this.ctx.globalAlpha = 1.0;
      this.ctx.drawImage(this.sharpCanvas, 0, 0);
      if (overlayStrength > 0) {
        // Blend the image over itself with the 'overlay' mode.
        this.ctx.globalCompositeOperation = 'overlay';
        this.ctx.globalAlpha = overlayStrength;
        this.ctx.drawImage(this.sharpCanvas, 0, 0);
        this.ctx.globalCompositeOperation = 'source-over';
        this.ctx.globalAlpha = 1.0;
      }
    }
    const imageData = this.ctx!.getImageData(0, 0, width, height);
    const pixels = imageData.data;
    onProgress(50);
    // ---- Luminance extraction (Rec. 709 weights), gamma, inversion ----
    const lumMatrix = new Float32Array(width * height);
    let minLum = 1.0, maxLum = 0.0;
    if (colorOutput) {
      this.colorData = new Uint8Array(width * height * 3);
    }
    for (let i = 0; i < width * height; i++) {
      const offset = i * 4;
      const r = pixels[offset];
      const g = pixels[offset + 1];
      const b = pixels[offset + 2];
      let lum = (0.2126 * r + 0.7152 * g + 0.0722 * b) / 255;
      if (colorOutput && this.colorData) {
        this.colorData[i * 3] = r;
        this.colorData[i * 3 + 1] = g;
        this.colorData[i * 3 + 2] = b;
      }
      if (gamma !== 1.0) {
        lum = Math.pow(lum, gamma);
      }
      if (options.invert) {
        lum = 1 - lum;
      }
      lumMatrix[i] = lum;
      if (lum < minLum) minLum = lum;
      if (lum > maxLum) maxLum = lum;
    }
    onProgress(60);
    // ---- Contrast stretch to span the full 0–1 range ----
    const lumRange = maxLum - minLum;
    if (autoStretch && lumRange > 0.01) {
      for (let i = 0; i < lumMatrix.length; i++) {
        lumMatrix[i] = (lumMatrix[i] - minLum) / lumRange;
      }
    }
    // ---- Floyd–Steinberg dithering over the quantized ramp levels ----
    if (dither) {
      const levels = charSet.length;
      for (let y = 0; y < height; y++) {
        for (let x = 0; x < width; x++) {
          const i = y * width + x;
          const oldVal = lumMatrix[i];
          const newVal = Math.round(oldVal * (levels - 1)) / (levels - 1);
          lumMatrix[i] = newVal;
          const error = oldVal - newVal;
          // Classic 7/16, 3/16, 5/16, 1/16 error distribution.
          if (x + 1 < width) lumMatrix[i + 1] += error * 7 / 16;
          if (y + 1 < height) {
            if (x > 0) lumMatrix[(y + 1) * width + (x - 1)] += error * 3 / 16;
            lumMatrix[(y + 1) * width + x] += error * 5 / 16;
            if (x + 1 < width) lumMatrix[(y + 1) * width + (x + 1)] += error * 1 / 16;
          }
        }
      }
    }
    onProgress(80);
    // ---- Map luminance to characters (optionally as colored spans) ----
    let output = '';
    if (colorOutput && this.colorData) {
      for (let y = 0; y < height; y++) {
        for (let x = 0; x < width; x++) {
          const i = y * width + x;
          const brightness = Math.max(0, Math.min(1, lumMatrix[i]));
          const charIndex = Math.floor(brightness * (charSet.length - 1));
          const safeIndex = Math.max(0, Math.min(charSet.length - 1, charIndex));
          const char = charSet[safeIndex];
          const r = this.colorData[i * 3];
          const g = this.colorData[i * 3 + 1];
          const b = this.colorData[i * 3 + 2];
          // Escape the three HTML-significant characters a ramp may contain.
          const safeChar = char === '<' ? '&lt;' : char === '>' ? '&gt;' : char === '&' ? '&amp;' : char;
          output += `<span style="color:rgb(${r},${g},${b})">${safeChar}</span>`;
        }
        output += '\n';
      }
    } else {
      for (let y = 0; y < height; y++) {
        for (let x = 0; x < width; x++) {
          const brightness = Math.max(0, Math.min(1, lumMatrix[y * width + x]));
          const charIndex = Math.floor(brightness * (charSet.length - 1));
          const safeIndex = Math.max(0, Math.min(charSet.length - 1, charIndex));
          output += charSet[safeIndex];
        }
        output += '\n';
      }
    }
    onProgress(100);
    if (colorOutput) {
      return {
        output,
        isHtml: true,
        width,
        height
      };
    }
    return output;
  }

  /** Width/height ratio of the image; 1 when dimensions are unavailable. */
  private getImageRatio(img: HTMLImageElement): number {
    if (img.width && img.height) {
      return img.width / img.height;
    }
    return 1;
  }

  /**
   * Normalizes the image source: loads a URL into a CORS-enabled
   * element, or waits for an already-passed element to finish loading.
   */
  private resolveImage(src: string | HTMLImageElement): Promise<HTMLImageElement> {
    return new Promise((resolve, reject) => {
      if (src instanceof HTMLImageElement) {
        if (src.complete) return resolve(src);
        src.onload = () => resolve(src);
        src.onerror = reject;
        return;
      }
      const img = new Image();
      img.crossOrigin = 'Anonymous';
      img.src = src;
      img.onload = () => resolve(img);
      img.onerror = () => reject(new Error('Failed to load image'));
    });
  }
}
/**
 * Convenience entry point: converts an image (URL or element) to ASCII art
 * using a throwaway AsciiGenerator with the given options.
 *
 * @param imageSource URL string or an HTMLImageElement.
 * @param options     Generation options; defaults to `{}`.
 * @returns Plain-text ASCII, or an AsciiResult for color/HTML output.
 */
export async function imageToAscii(imageSource: string | HTMLImageElement, options: AsciiOptions = {}): Promise<string | AsciiResult> {
    return new AsciiGenerator().generate(imageSource, options);
}
/**
 * Analyzes an image and derives recommended AsciiOptions for it.
 *
 * The image is downsampled onto a 100x100 canvas; a luminance histogram
 * drives heuristics for exposure, contrast, gamma, inversion, character-set
 * choice, denoising and edge enhancement. Optional precomputed metadata
 * (dominant color, palette, fine-detail flag) refines the result.
 *
 * @param img  Source image; must be loaded and CORS-clean (getImageData).
 * @param meta Optional precomputed metadata, or null.
 * @returns Partial options to merge over defaults; `{}` when no DOM or 2D
 *          context is available (e.g. server-side rendering).
 */
export function autoTuneImage(img: HTMLImageElement, meta: ImageMetadata | null = null): Partial<AsciiOptions> {
    // Canvas requires a DOM — bail out during SSR.
    if (typeof document === 'undefined') return {};
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    if (!ctx) return {};
    // 100x100 sample: cheap to scan, and aspect distortion is irrelevant
    // for histogram statistics.
    const size = 100;
    canvas.width = size;
    canvas.height = size;
    ctx.drawImage(img, 0, 0, size, size);
    const imageData = ctx.getImageData(0, 0, size, size);
    const pixels = imageData.data;
    // 256-bucket luminance histogram (Rec. 709 weights), plus running sum
    // for the average luminance.
    const histogram = new Array(256).fill(0);
    let totalLum = 0;
    for (let i = 0; i < pixels.length; i += 4) {
        const lum = Math.round(0.2126 * pixels[i] + 0.7152 * pixels[i + 1] + 0.0722 * pixels[i + 2]);
        histogram[lum]++;
        totalLum += lum;
    }
    const pixelCount = pixels.length / 4;
    const avgLum = totalLum / pixelCount;
    // Walk the cumulative histogram to find the 5th and 95th luminance
    // percentiles (p5/p95) — the "active" tonal range of the image.
    let p5: number | null = null, p95 = 255, count = 0;
    for (let i = 0; i < 256; i++) {
        count += histogram[i];
        if (p5 === null && count > pixelCount * 0.05) p5 = i;
        if (count > pixelCount * 0.95) { p95 = i; break; }
    }
    p5 = p5 ?? 0;
    // Exposure pushes the tonal midpoint toward mid-gray (128); the divisor
    // is floored at 10 to avoid blowing up on near-black images, and the
    // result is clamped to [0.4, 2.8].
    const midPoint = (p5 + p95) / 2;
    let exposure = 128 / Math.max(midPoint, 10);
    exposure = Math.max(0.4, Math.min(2.8, exposure));
    // Narrower active range -> stronger contrast boost (stepped thresholds).
    const activeRange = p95 - p5;
    let contrast = 1.1;
    if (activeRange < 50) contrast = 2.5;
    else if (activeRange < 100) contrast = 1.8;
    else if (activeRange < 150) contrast = 1.4;
    let invert = false;
    let saturation = 1.2;
    // Border-based background detection below is skipped when metadata
    // already identified a bright dominant color.
    let useEdgeDetection = true;
    if (meta) {
        const { color_dominant, color_palette } = meta;
        if (color_dominant) {
            // Bright dominant color -> likely a light background: invert so
            // the subject renders with dense glyphs, and trust this over the
            // border heuristic.
            const [r, g, b] = color_dominant;
            const domLum = 0.2126 * r + 0.7152 * g + 0.0722 * b;
            if (domLum > 140) {
                invert = true;
                useEdgeDetection = false;
            }
        }
        if (color_palette && Array.isArray(color_palette) && color_palette.length > 0) {
            // Average HSV-style saturation of the palette decides how much
            // to saturate the color output: vivid -> 1.6, near-gray -> 0.
            let totalSat = 0;
            for (const [r, g, b] of color_palette) {
                const max = Math.max(r, g, b);
                const delta = max - Math.min(r, g, b);
                const s = max === 0 ? 0 : delta / max;
                totalSat += s;
            }
            const avgSat = totalSat / color_palette.length;
            if (avgSat > 0.4) saturation = 1.6;
            else if (avgSat < 0.1) saturation = 0.0;
            else saturation = 1.2;
        }
    }
    if (useEdgeDetection) {
        // Fallback background detection: average the luminance of a 5px
        // border frame; a bright frame implies a light background -> invert.
        let edgeLumSum = 0;
        let edgeCount = 0;
        for (let y = 0; y < size; y++) {
            for (let x = 0; x < size; x++) {
                if (x < 5 || x >= size - 5 || y < 5 || y >= size - 5) {
                    const i = (y * size + x) * 4;
                    edgeLumSum += 0.2126 * pixels[i] + 0.7152 * pixels[i + 1] + 0.0722 * pixels[i + 2];
                    edgeCount++;
                }
            }
        }
        const bgLum = edgeLumSum / edgeCount;
        if (bgLum > 160) {
            invert = true;
        }
    }
    // Dark images get gamma < 1 to lift shadow detail.
    const gamma = avgLum < 80 ? 0.75 : 1.0;
    let recommendedCharSet: CharSetKey = 'standard';
    let denoise = false;
    let enhanceEdges = false;
    let overlayStrength = 0.3;
    // Classify the histogram shape to pick a character set:
    // few peaks + wide range -> graphic/logo-like ("minimal" + edges),
    // very wide range -> "blocks", narrow range -> "simple" + denoise,
    // mid range -> "standard", denoising only if sampled noise is high.
    const histogramPeaks = countHistogramPeaks(histogram, pixelCount);
    const isHighContrast = activeRange > 180;
    const isLowContrast = activeRange < 80;
    const isBimodal = histogramPeaks <= 3;
    if (isBimodal && activeRange > 150) {
        recommendedCharSet = 'minimal';
        enhanceEdges = true;
        overlayStrength = 0.1;
    } else if (isHighContrast) {
        recommendedCharSet = 'blocks';
        overlayStrength = 0.2;
    } else if (isLowContrast) {
        recommendedCharSet = 'simple';
        denoise = true;
        overlayStrength = 0.5;
    } else if (activeRange > 100 && activeRange <= 180) {
        recommendedCharSet = 'standard';
        const noiseLevel = estimateNoiseLevel(pixels, size);
        if (noiseLevel > 20) {
            denoise = true;
        }
    }
    // Metadata fine-detail flag overrides the histogram-based choice.
    if (meta?.has_fine_detail) {
        recommendedCharSet = 'dots';
    }
    return {
        exposure: parseFloat(exposure.toFixed(2)),
        contrast,
        invert,
        gamma,
        saturation: parseFloat(saturation.toFixed(1)),
        charSet: recommendedCharSet,
        denoise,
        enhanceEdges,
        overlayStrength
    };
}
/**
 * Counts significant local maxima in a 256-bucket histogram.
 *
 * A bucket is a peak when it strictly exceeds both neighbors AND holds more
 * than 2% of all pixels. Once a peak is counted, further maxima are ignored
 * until the curve drops back below half the significance threshold, so one
 * broad hump is not counted several times.
 *
 * @param histogram  256-entry bucket counts.
 * @param pixelCount Total number of pixels (sets the significance bar).
 * @returns Number of distinct significant peaks.
 */
function countHistogramPeaks(histogram: number[], pixelCount: number): number {
    const significance = pixelCount * 0.02;
    let peakCount = 0;
    let withinPeak = false;
    for (let bucket = 1; bucket < 255; bucket++) {
        const value = histogram[bucket];
        const isLocalMax = value > histogram[bucket - 1] && value > histogram[bucket + 1];
        if (isLocalMax && value > significance && !withinPeak) {
            peakCount++;
            withinPeak = true;
        } else if (value < significance / 2) {
            // Curve has fallen far enough — the next maximum is a new peak.
            withinPeak = false;
        }
    }
    return peakCount;
}
/**
 * Estimates image noise by randomly sampling 100 interior pixels and
 * averaging how far each one's luminance deviates from the mean of its four
 * 4-connected neighbors. Larger values mean noisier pixels.
 *
 * Stochastic by design (Math.random sampling), so repeated calls on the
 * same image may differ slightly.
 *
 * @param pixels RGBA byte data (as returned by getImageData).
 * @param size   Width/height of the square sample image in pixels.
 * @returns Mean absolute center-vs-neighborhood luminance deviation.
 */
function estimateNoiseLevel(pixels: Uint8ClampedArray, size: number): number {
    // Rec. 709 luminance of the pixel starting at RGBA byte offset `base`.
    const lumAt = (base: number): number =>
        0.2126 * pixels[base] + 0.7152 * pixels[base + 1] + 0.0722 * pixels[base + 2];

    const SAMPLE_COUNT = 100;
    let accumulated = 0;
    for (let sample = 0; sample < SAMPLE_COUNT; sample++) {
        // Random interior coordinate so all four neighbors exist.
        const col = Math.floor(Math.random() * (size - 2)) + 1;
        const row = Math.floor(Math.random() * (size - 2)) + 1;
        const centerLum = lumAt((row * size + col) * 4);
        const up = lumAt(((row - 1) * size + col) * 4);
        const down = lumAt(((row + 1) * size + col) * 4);
        const left = lumAt((row * size + (col - 1)) * 4);
        const right = lumAt((row * size + (col + 1)) * 4);
        const neighborMean = (up + down + left + right) / 4;
        accumulated += Math.abs(centerLum - neighborMean);
    }
    return accumulated / SAMPLE_COUNT;
}