Skip to content

Instantly share code, notes, and snippets.

@BurnedChris
Last active November 14, 2025 05:18
Show Gist options
  • Select an option

  • Save BurnedChris/616a72a6b41927b699de3564d4c51a12 to your computer and use it in GitHub Desktop.

Select an option

Save BurnedChris/616a72a6b41927b699de3564d4c51a12 to your computer and use it in GitHub Desktop.
Benchmark scripts for takumi and satori
// ✓ Generated 87 OG images using Vercel OG: docs-new-layout/.c15t → /docs-new-layout/public/og
// ⏱️ Total time: 71725ms (1m)
// 📊 Performance: 3214.4882352941177ms (3s) - avg per image
// 🚀 Throughput: 1.21 images/second
import fs from 'node:fs';
import { cpus } from 'node:os';
import path from 'node:path';
import { ImageResponse } from '@vercel/og';
import ms from 'ms';
import React from 'react';
import DocsTemplate from '../src/pkgs/open-graph/docs-open-image-template';
import { loadGoogleFont } from '../src/pkgs/open-graph/utils/load-fonts';
// Make React available globally for the template components
globalThis.React = React;
// Hardcoded paths for simplicity
const SRC_DIR = path.resolve(process.cwd(), '.c15t'); // where your .mdx live
const OUT_DIR =
process.env.OG_OUT_DIR || path.resolve(process.cwd(), 'public', 'og'); // where og images will be written
// Concurrency configuration (matching Takumi script approach)
const DECIMAL_RADIX = 10; // base-10 radix for Number.parseInt on the env var
const MAX_CONCURRENCY_CAP = 8; // Match Takumi script cap
// Use same env var name as Takumi script for consistency.
// Resolution order: a valid positive OG_GENERATION_CONCURRENCY wins,
// otherwise the CPU count capped at MAX_CONCURRENCY_CAP (minimum 1).
const getConcurrency = (): number => {
  const fromEnv = process.env.OG_GENERATION_CONCURRENCY;
  if (fromEnv) {
    const requested = Number.parseInt(fromEnv, DECIMAL_RADIX);
    if (Number.isFinite(requested) && requested > 0) {
      return requested;
    }
  }
  return Math.max(1, Math.min(MAX_CONCURRENCY_CAP, cpus().length));
};
const MAX_CONCURRENT = getConcurrency();
// Regex patterns defined at top level for performance
const FRONTMATTER_REGEX = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/; // [1] = frontmatter body, [2] = document body
const TITLE_REGEX = /^title:\s*(.+)$/m; // frontmatter `title:` line
const DESC_REGEX = /^description:\s*(.+)$/m; // frontmatter `description:` line
const QUOTE_REGEX = /['"]/g; // quotes stripped from frontmatter values
const MDX_EXT_REGEX = /\.mdx$/; // trailing .mdx extension
const WIN_PATH_REGEX = /\\/g; // backslash separators (Windows paths)
const INDEX_REGEX = /\/index$/; // trailing /index segment in URLs
// Image metadata
// OG card dimensions in pixels.
const imageSize = {
width: 1200,
height: 630,
};
// Constants for calculations
const MILLISECONDS_PER_SECOND = 1000;
// Global font cache to avoid reloading fonts for each image (matching Takumi approach)
let globalFonts: Array<{
  name: string;
  data: ArrayBuffer;
  weight: 400 | 500 | 600 | 700 | 900;
  style: 'normal';
}> | null = null;
/**
 * Load the Geist / Geist Mono fonts once and cache them.
 *
 * Improvement: the original awaited each of the six downloads sequentially;
 * they are independent, so they are now fetched in parallel with
 * Promise.all (the two Takumi scripts already do this).
 *
 * @returns The cached font descriptor array expected by @vercel/og.
 */
async function loadFonts(): Promise<
  Array<{
    name: string;
    data: ArrayBuffer;
    weight: 400 | 500 | 600 | 700 | 900;
    style: 'normal';
  }>
> {
  if (globalFonts) {
    return globalFonts;
  }
  process.stdout.write('Loading fonts...');
  // Download all weights concurrently instead of one at a time.
  const [w900, w700, w600, w500, w400, mono400] = await Promise.all([
    loadGoogleFont('Geist', '900'),
    loadGoogleFont('Geist', '700'),
    loadGoogleFont('Geist', '600'),
    loadGoogleFont('Geist', '500'),
    loadGoogleFont('Geist', '400'),
    loadGoogleFont('Geist+Mono', '400'),
  ]);
  globalFonts = [
    { name: 'Geist', data: w900, weight: 900 as const, style: 'normal' as const },
    { name: 'Geist', data: w700, weight: 700 as const, style: 'normal' as const },
    { name: 'Geist', data: w600, weight: 600 as const, style: 'normal' as const },
    { name: 'Geist', data: w500, weight: 500 as const, style: 'normal' as const },
    { name: 'Geist', data: w400, weight: 400 as const, style: 'normal' as const },
    {
      name: 'Geist Mono',
      data: mono400,
      weight: 400 as const,
      style: 'normal' as const,
    },
  ];
  process.stdout.write('✓ Fonts loaded\n');
  return globalFonts;
}
/**
 * Read an MDX file and pull out its frontmatter title/description plus the
 * docs URL derived from its location under SRC_DIR.
 *
 * @param filePath - Absolute path to the .mdx source file.
 * @returns Metadata for the OG template, or null when the file cannot be read.
 */
function processMdxFile(filePath: string): {
  title: string;
  description?: string;
  url: string;
} | null {
  try {
    const raw = fs.readFileSync(filePath, 'utf8');
    let title = '';
    let description = '';
    // Naive frontmatter parsing: grab the block between the leading --- fences.
    const fmMatch = raw.match(FRONTMATTER_REGEX);
    if (fmMatch) {
      const frontmatter = fmMatch[1];
      title =
        frontmatter.match(TITLE_REGEX)?.[1].replace(QUOTE_REGEX, '').trim() ??
        '';
      description =
        frontmatter.match(DESC_REGEX)?.[1].replace(QUOTE_REGEX, '').trim() ??
        '';
    }
    // Derive the page URL from the file's path relative to the source root.
    const urlPath = path
      .relative(SRC_DIR, filePath)
      .replace(MDX_EXT_REGEX, '')
      .replace(WIN_PATH_REGEX, '/') // Convert Windows paths
      .replace(INDEX_REGEX, ''); // Remove index from URLs
    return {
      // Fall back to the file name when no frontmatter title was found.
      title: title || path.basename(filePath, '.mdx'),
      description: description || undefined,
      url: `/docs/${urlPath}`,
    };
  } catch (error) {
    process.stderr.write(`Failed to process ${filePath}: ${error}\n`);
    return null;
  }
}
/**
 * Create a directory (and any missing parents) if it is not already present.
 */
function ensureDir(dir: string): void {
  if (fs.existsSync(dir)) {
    return;
  }
  fs.mkdirSync(dir, { recursive: true });
}
/**
 * Map an MDX source file to its mirrored .png destination under OUT_DIR.
 */
function getOutputPath(mdxFile: string): string {
  const relative = path.relative(SRC_DIR, mdxFile);
  const pngName = relative.replace(MDX_EXT_REGEX, '.png');
  return path.join(OUT_DIR, pngName);
}
/**
 * Log successful image generation.
 *
 * Fix: the original printed the literal text "$(unknown)" and never used the
 * computed `filename`; the message now interpolates the generated file name.
 */
function logSuccess(filePath: string, title?: string): void {
  const filename = path.basename(getOutputPath(filePath));
  process.stdout.write(`✓ Generated OG image: ${filename} (${title})\n`);
}
/**
 * Record a failure: append the formatted message to the shared error list
 * and echo it to stderr.
 */
function logError(filePath: string, error: string, errors: string[]): void {
  const message = `Failed to process ${filePath}: ${error}`;
  errors.push(message);
  process.stderr.write(`${message}\n`);
}
/**
 * Depth-first generator yielding every .mdx file beneath `dir`.
 * Yields nothing when the directory does not exist.
 */
function* walkMdxFiles(dir: string): Generator<string> {
  if (!fs.existsSync(dir)) {
    return;
  }
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walkMdxFiles(entryPath);
      continue;
    }
    if (entry.isFile() && entryPath.endsWith('.mdx')) {
      yield entryPath;
    }
  }
}
/**
 * Simple OG image template component for Vercel OG.
 * Thin wrapper pinning the shared docs template to the light theme with
 * debug rendering disabled.
 */
function DocsOGTemplate({
title,
description,
}: {
title?: string;
description?: string;
}) {
return (
<DocsTemplate data={{ title, description }} debug={false} theme="light" />
);
}
/**
 * Render one OG image with Vercel OG, reusing the pre-loaded font cache.
 *
 * @param fonts - Cached font descriptors (see loadFonts).
 * @param title - Page title shown on the card.
 * @param description - Optional subtitle text.
 * @returns PNG bytes as a Node Buffer.
 */
async function generateOGImage(
  fonts: Array<{
    name: string;
    data: ArrayBuffer;
    weight: 400 | 500 | 600 | 700 | 900;
    style: 'normal';
  }>,
  title?: string,
  description?: string
): Promise<Buffer> {
  const element = React.createElement(DocsOGTemplate, { title, description });
  const rendered = new ImageResponse(element, {
    width: imageSize.width,
    height: imageSize.height,
    fonts,
  });
  // ImageResponse behaves like a web Response; pull its bytes into a Buffer.
  return Buffer.from(await rendered.arrayBuffer());
}
/**
 * Generate the OG image for a single MDX file, capturing timing data.
 *
 * @param fonts - Cached font descriptors.
 * @param mdxFile - Absolute path to the source .mdx file.
 * @returns Result record with success flag, timing, and error text if any.
 */
async function processFile(
  fonts: Array<{
    name: string;
    data: ArrayBuffer;
    weight: 400 | 500 | 600 | 700 | 900;
    style: 'normal';
  }>,
  mdxFile: string
): Promise<{
  success: boolean;
  title?: string;
  error?: string;
  filePath: string;
  startTime: number;
  endTime?: number;
}> {
  const startTime = Date.now();
  try {
    const metadata = processMdxFile(mdxFile);
    // A null result means the file was unreadable; it is already logged.
    if (!metadata) {
      return { success: false, filePath: mdxFile, startTime };
    }
    const outputPath = getOutputPath(mdxFile);
    // Mirror the source tree under OUT_DIR before writing.
    ensureDir(path.dirname(outputPath));
    const imageBuffer = await generateOGImage(
      fonts,
      metadata.title,
      metadata.description
    );
    fs.writeFileSync(outputPath, imageBuffer);
    return {
      success: true,
      title: metadata.title,
      filePath: mdxFile,
      startTime,
      endTime: Date.now(),
    };
  } catch (fileError) {
    return {
      success: false,
      error: String(fileError),
      filePath: mdxFile,
      startTime,
      endTime: Date.now(),
    };
  }
}
/**
 * Render files in fixed-size batches of MAX_CONCURRENT, collecting per-image
 * latencies and accumulated error messages.
 *
 * @returns Aggregate counts plus total wall-clock time and mean latency (ms).
 */
async function processFilesConcurrently(
  fonts: Array<{
    name: string;
    data: ArrayBuffer;
    weight: 400 | 500 | 600 | 700 | 900;
    style: 'normal';
  }>,
  files: string[]
): Promise<{
  imageCount: number;
  errors: string[];
  totalTime: number;
  avgLatency: number;
}> {
  let imageCount = 0;
  const errors: string[] = [];
  const latencies: number[] = [];
  const overallStart = Date.now();
  for (let offset = 0; offset < files.length; offset += MAX_CONCURRENT) {
    const batch = files.slice(offset, offset + MAX_CONCURRENT);
    // Awaiting per batch is intentional: it bounds concurrency at MAX_CONCURRENT.
    const results = await Promise.all(
      batch.map((file) => processFile(fonts, file))
    );
    for (const result of results) {
      if (result.success && result.endTime) {
        logSuccess(result.filePath, result.title);
        imageCount += 1;
        latencies.push(result.endTime - result.startTime);
      } else if (result.error) {
        logError(result.filePath, result.error, errors);
      }
    }
    const done = Math.min(offset + MAX_CONCURRENT, files.length);
    process.stdout.write(`Progress: ${done}/${files.length} files processed\n`);
  }
  const totalTime = Date.now() - overallStart;
  const latencySum = latencies.reduce((sum, value) => sum + value, 0);
  const avgLatency = latencies.length > 0 ? latencySum / latencies.length : 0;
  return { imageCount, errors, totalTime, avgLatency };
}
/**
 * Gather every MDX file under dirPath and render the set concurrently.
 * Returns zeroed metrics when the directory is missing or contains no files.
 */
async function processDirectory(
  fonts: Array<{
    name: string;
    data: ArrayBuffer;
    weight: 400 | 500 | 600 | 700 | 900;
    style: 'normal';
  }>,
  dirPath: string
): Promise<{
  imageCount: number;
  errors: string[];
  totalTime: number;
  avgLatency: number;
}> {
  if (!fs.existsSync(dirPath)) {
    return { imageCount: 0, errors: [], totalTime: 0, avgLatency: 0 };
  }
  const allFiles = [...walkMdxFiles(dirPath)];
  if (allFiles.length === 0) {
    return { imageCount: 0, errors: [], totalTime: 0, avgLatency: 0 };
  }
  process.stdout.write(`Found ${allFiles.length} files to process...\n`);
  return processFilesConcurrently(fonts, allFiles);
}
/**
 * Generate OG images for all MDX files with font caching and performance metrics.
 *
 * Fix: the original reported `(docsAvg + changelogAvg) / 2`, an unweighted
 * mean of two means — wrong when the directories hold different numbers of
 * files, and artificially halved when one directory is empty. The average
 * latency is now weighted by each directory's image count.
 */
async function generateAllOGImages(): Promise<void> {
  try {
    if (!fs.existsSync(SRC_DIR)) {
      process.stdout.write(`Source directory not found: ${SRC_DIR}\n`);
      return;
    }
    // Ensure output directory exists
    ensureDir(OUT_DIR);
    // Load fonts once at the beginning (matching Takumi approach)
    const fonts = await loadFonts();
    process.stdout.write(
      `Generating Open Graph images using ${MAX_CONCURRENT} concurrent processes (Vercel OG)...\n`
    );
    const overallStart = Date.now();
    // Process docs and changelog directories
    const docsResult = await processDirectory(fonts, path.join(SRC_DIR, 'docs'));
    const changelogResult = await processDirectory(
      fonts,
      path.join(SRC_DIR, 'changelog')
    );
    const totalImages = docsResult.imageCount + changelogResult.imageCount;
    const allErrors = [...docsResult.errors, ...changelogResult.errors];
    const totalTime = Date.now() - overallStart;
    // Weight each directory's average latency by its image count so the
    // combined figure is the true per-image mean.
    const combinedLatency =
      totalImages > 0
        ? (docsResult.avgLatency * docsResult.imageCount +
            changelogResult.avgLatency * changelogResult.imageCount) /
          totalImages
        : 0;
    process.stdout.write(
      `\n✓ Generated ${totalImages} OG images using Vercel OG: ${SRC_DIR} → ${OUT_DIR}\n`
    );
    process.stdout.write(`⏱️ Total time: ${totalTime}ms (${ms(totalTime)})\n`);
    process.stdout.write(
      `📊 Performance: ${combinedLatency}ms (${ms(combinedLatency)}) - avg per image\n`
    );
    process.stdout.write(
      `🚀 Throughput: ${((totalImages / totalTime) * MILLISECONDS_PER_SECOND).toFixed(2)} images/second\n`
    );
    if (allErrors.length > 0) {
      process.stdout.write(`\n⚠️ ${allErrors.length} files had errors:\n`);
      for (const error of allErrors) {
        process.stdout.write(` - ${error}\n`);
      }
    }
  } catch (error) {
    process.stderr.write(`Failed to generate OG images: ${error}\n`);
    throw error;
  }
}
// Run the generation if this file is executed directly
// (module URL matches the CLI entry path passed to node).
if (import.meta.url === `file://${process.argv[1]}`) {
generateAllOGImages().catch((error) => {
process.stderr.write(`OG image generation failed: ${error}\n`);
process.exit(1);
});
}
export { generateAllOGImages };
// https://x.com/kanewang_/status/1958609424706416949 based on this tweet
//
// ✓ Generated 87 OG images using Takumi: docs-new-layout/.c15t → /docs-new-layout/public/og
// ⏱️ Total time: 1248ms (1s)
// 📊 Performance: 53.95402298850575ms (53.95402298850575ms) - avg per image
// 🚀 Throughput: 69.71 images/second
import fs from 'node:fs';
import { cpus } from 'node:os';
import path from 'node:path';
import { Renderer } from '@takumi-rs/core';
import { fromJsx } from '@takumi-rs/helpers/jsx';
import ms from 'ms';
import React from 'react';
import DocsTemplate from '../src/pkgs/open-graph/docs-open-image-template';
import { loadGoogleFont } from '../src/pkgs/open-graph/utils/load-fonts';
// Make React available globally for fromJsx
globalThis.React = React;
// Hardcoded paths for simplicity
const SRC_DIR = path.resolve(process.cwd(), '.c15t'); // where your .mdx live
const OUT_DIR =
process.env.OG_OUT_DIR || path.resolve(process.cwd(), 'public', 'og'); // where og images will be written
// Regex patterns defined at top level for performance
const FRONTMATTER_REGEX = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/; // [1] = frontmatter body, [2] = document body
const TITLE_REGEX = /^title:\s*(.+)$/m; // frontmatter `title:` line
const DESC_REGEX = /^description:\s*(.+)$/m; // frontmatter `description:` line
const QUOTE_REGEX = /['"]/g; // quotes stripped from frontmatter values
const MDX_EXT_REGEX = /\.mdx$/; // trailing .mdx extension
const WIN_PATH_REGEX = /\\/g; // backslash separators (Windows paths)
const INDEX_REGEX = /\/index$/; // trailing /index segment in URLs
// Image metadata
// OG card dimensions in pixels.
const imageSize = {
width: 1200,
height: 630,
};
// Constants for calculations
const MILLISECONDS_PER_SECOND = 1000;
// Concurrency configuration (simpler approach like convert-mdx-to-md.ts)
const DECIMAL_RADIX = 10; // base-10 radix for Number.parseInt on the env var
const MAX_CONCURRENCY_CAP = 8; // upper bound on parallel renders
/**
 * Create a directory (and any missing parents) if it is not already present.
 */
function ensureDir(dir: string): void {
  if (fs.existsSync(dir)) {
    return;
  }
  fs.mkdirSync(dir, { recursive: true });
}
/**
 * Depth-first generator yielding every .mdx file beneath `dir`.
 * Yields nothing when the directory does not exist.
 */
function* walkMdxFiles(dir: string): Generator<string> {
  if (!fs.existsSync(dir)) {
    return;
  }
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walkMdxFiles(entryPath);
      continue;
    }
    if (entry.isFile() && entryPath.endsWith('.mdx')) {
      yield entryPath;
    }
  }
}
/**
 * Read an MDX file and pull out its frontmatter title/description plus the
 * docs URL derived from its location under SRC_DIR.
 *
 * @param filePath - Absolute path to the .mdx source file.
 * @returns Metadata for the OG template, or null when the file cannot be read.
 */
function processMdxFile(filePath: string): {
  title: string;
  description?: string;
  url: string;
} | null {
  try {
    const raw = fs.readFileSync(filePath, 'utf8');
    let title = '';
    let description = '';
    // Naive frontmatter parsing: grab the block between the leading --- fences.
    const fmMatch = raw.match(FRONTMATTER_REGEX);
    if (fmMatch) {
      const frontmatter = fmMatch[1];
      title =
        frontmatter.match(TITLE_REGEX)?.[1].replace(QUOTE_REGEX, '').trim() ??
        '';
      description =
        frontmatter.match(DESC_REGEX)?.[1].replace(QUOTE_REGEX, '').trim() ??
        '';
    }
    // Derive the page URL from the file's path relative to the source root.
    const urlPath = path
      .relative(SRC_DIR, filePath)
      .replace(MDX_EXT_REGEX, '')
      .replace(WIN_PATH_REGEX, '/') // Convert Windows paths
      .replace(INDEX_REGEX, ''); // Remove index from URLs
    return {
      // Fall back to the file name when no frontmatter title was found.
      title: title || path.basename(filePath, '.mdx'),
      description: description || undefined,
      url: `/docs/${urlPath}`,
    };
  } catch (error) {
    process.stderr.write(`Failed to process ${filePath}: ${error}\n`);
    return null;
  }
}
/**
 * Map an MDX source file to its mirrored .png destination under OUT_DIR.
 */
function getOutputPath(mdxFile: string): string {
  const relative = path.relative(SRC_DIR, mdxFile);
  const pngName = relative.replace(MDX_EXT_REGEX, '.png');
  return path.join(OUT_DIR, pngName);
}
/**
 * Recursively fix font weight values in a Takumi node tree.
 * Styles coming out of JSX often carry string weights ('700', 'bold');
 * the Takumi renderer expects numbers, so normalize them in place.
 */
const WEIGHT_MAP: Record<string, number> = {
  '100': 100,
  '200': 200,
  '300': 300,
  '400': 400,
  '500': 500,
  '600': 600,
  '700': 700,
  '800': 800,
  '900': 900,
  normal: 400,
  bold: 700,
};
function fixFontWeights(node: unknown): unknown {
  // Primitives (strings, numbers, null/undefined) carry no style to rewrite.
  if (!node || typeof node !== 'object') {
    return node;
  }
  const record = node as Record<string, unknown>;
  const style = record.style as Record<string, unknown> | undefined;
  const rawWeight = style?.fontWeight;
  if (style && typeof rawWeight === 'string' && rawWeight in WEIGHT_MAP) {
    style.fontWeight = WEIGHT_MAP[rawWeight];
  }
  const children = record.children;
  if (Array.isArray(children)) {
    record.children = children.map((child) => fixFontWeights(child));
  }
  return node;
}
// Module-level caches: fonts and the renderer are created once and reused
// across every image to avoid per-image setup cost.
let globalFonts: ArrayBuffer[] | null = null;
let globalRenderer: Renderer | null = null;
/**
 * Download the Geist weights plus Geist Mono in parallel and cache them.
 */
async function loadFonts(): Promise<ArrayBuffer[]> {
  if (globalFonts !== null) {
    return globalFonts;
  }
  process.stdout.write('Loading fonts...\n');
  const requests: Array<[string, string]> = [
    ['Geist', '900'],
    ['Geist', '700'],
    ['Geist', '600'],
    ['Geist', '500'],
    ['Geist', '400'],
    ['Geist+Mono', '400'],
  ];
  globalFonts = await Promise.all(
    requests.map(([family, weight]) => loadGoogleFont(family, weight))
  );
  process.stdout.write('✓ Fonts loaded\n');
  return globalFonts;
}
/**
 * Lazily construct the shared Takumi renderer (loading fonts on first use).
 */
async function getRenderer(): Promise<Renderer> {
  if (!globalRenderer) {
    globalRenderer = new Renderer({
      fonts: await loadFonts(),
      persistentImages: [],
    });
    process.stdout.write('✓ Renderer initialized\n');
  }
  return globalRenderer;
}
/**
 * Render a docs OG card to PNG bytes with Takumi.
 *
 * @param title - Page title shown on the card.
 * @param description - Optional subtitle text.
 */
async function generateOGImage(
  title?: string,
  description?: string
): Promise<Buffer> {
  const element = React.createElement(DocsTemplate, {
    data: { title, description },
    debug: false,
    theme: 'light',
  });
  // fromJsx converts the React element tree into Takumi's node format.
  const rawNode = await fromJsx(element);
  // Normalize string font weights to the numbers Takumi expects.
  // biome-ignore lint/suspicious/noExplicitAny: Required for Takumi node manipulation
  const node = fixFontWeights(rawNode) as any;
  const renderer = await getRenderer();
  return renderer.renderAsync(node, {
    width: imageSize.width,
    height: imageSize.height,
  });
}
// Concurrency: honour OG_GENERATION_CONCURRENCY when it parses to a positive
// integer, otherwise cap the machine's CPU count at MAX_CONCURRENCY_CAP.
const getConcurrency = (): number => {
  const fromEnv = process.env.OG_GENERATION_CONCURRENCY;
  if (fromEnv) {
    const requested = Number.parseInt(fromEnv, DECIMAL_RADIX);
    if (Number.isFinite(requested) && requested > 0) {
      return requested;
    }
  }
  return Math.max(1, Math.min(MAX_CONCURRENCY_CAP, cpus().length));
};
const CONCURRENCY = getConcurrency();
/**
 * Process a single MDX file with timing.
 *
 * Fix: the success log printed the literal text "$(unknown)" and left the
 * computed `filename` unused; it now interpolates the generated file name.
 *
 * @returns success flag, the page title when known, and wall-clock latency (ms).
 */
async function processFileWithTiming(
  mdxFilePath: string
): Promise<{ success: boolean; title?: string; latency: number }> {
  const startTime = Date.now();
  try {
    // Process MDX file to get metadata
    const metadata = processMdxFile(mdxFilePath);
    if (!metadata) {
      return { success: false, latency: Date.now() - startTime };
    }
    // Mirror the source tree under OUT_DIR
    const outputPath = getOutputPath(mdxFilePath);
    ensureDir(path.dirname(outputPath));
    // Generate the OG image
    const imageBuffer = await generateOGImage(
      metadata.title,
      metadata.description
    );
    // Write the image file
    fs.writeFileSync(outputPath, imageBuffer);
    const latency = Date.now() - startTime;
    const filename = path.basename(outputPath);
    process.stdout.write(
      `✓ Generated OG image: ${filename} (${metadata.title})\n`
    );
    return { success: true, title: metadata.title, latency };
  } catch (fileError) {
    const latency = Date.now() - startTime;
    process.stderr.write(`Failed to process ${mdxFilePath}: ${fileError}\n`);
    return { success: false, latency };
  }
}
/**
 * Convert all MDX files to OG images with performance metrics
 *
 * Walks SRC_DIR for .mdx files, renders each to a PNG under OUT_DIR in
 * batches of CONCURRENCY, then prints timing and throughput statistics.
 */
async function generateAllOGImages(): Promise<void> {
try {
if (!fs.existsSync(SRC_DIR)) {
process.stdout.write(`Source directory not found: ${SRC_DIR}\n`);
return;
}
// Ensure output directory exists
ensureDir(OUT_DIR);
// Initialize renderer once at the beginning (this also loads fonts)
await getRenderer();
// Gather all MDX files first
const mdxFiles: string[] = [];
for (const filePath of walkMdxFiles(SRC_DIR)) {
mdxFiles.push(filePath);
}
if (mdxFiles.length === 0) {
process.stdout.write('No MDX files found to process.\n');
return;
}
let successCount = 0;
let errorCount = 0;
const latencies: number[] = [];
process.stdout.write(
`Generating ${mdxFiles.length} OG images using ${CONCURRENCY} concurrent processes (Takumi)...\n`
);
const overallStart = Date.now();
// Process files in chunks: awaiting each batch bounds concurrency at CONCURRENCY.
for (let i = 0; i < mdxFiles.length; i += CONCURRENCY) {
const chunk = mdxFiles.slice(i, i + CONCURRENCY);
const results = await Promise.all(
chunk.map((mdxFilePath) => processFileWithTiming(mdxFilePath))
);
// Count results and collect latencies
for (const result of results) {
latencies.push(result.latency);
if (result.success) {
successCount++;
} else {
errorCount++;
}
}
// Show progress
const processed = Math.min(i + CONCURRENCY, mdxFiles.length);
process.stdout.write(
`Progress: ${processed}/${mdxFiles.length} files processed\n`
);
}
const totalTime = Date.now() - overallStart;
// Mean per-file latency across all processed files (0 when none ran).
const avgLatency =
latencies.length > 0
? latencies.reduce((a, b) => a + b, 0) / latencies.length
: 0;
process.stdout.write(
`\n✓ Generated ${successCount} OG images using Takumi: ${SRC_DIR} → ${OUT_DIR}\n`
);
process.stdout.write(`⏱️ Total time: ${totalTime}ms (${ms(totalTime)})\n`);
process.stdout.write(
`📊 Performance: ${avgLatency}ms (${ms(avgLatency)}) - avg per image\n`
);
process.stdout.write(
`🚀 Throughput: ${((successCount / totalTime) * MILLISECONDS_PER_SECOND).toFixed(2)} images/second\n`
);
if (errorCount > 0) {
process.stdout.write(`⚠️ ${errorCount} files had errors\n`);
}
} catch (error) {
process.stderr.write(`Failed to generate OG images: ${error}\n`);
throw error;
}
}
// Run the generation if this file is executed directly
// (module URL matches the CLI entry path passed to node).
if (import.meta.url === `file://${process.argv[1]}`) {
generateAllOGImages().catch((error) => {
process.stderr.write(`OG image generation failed: ${error}\n`);
process.exit(1);
});
}
export { generateAllOGImages };
// ✓ Generated 87 OG images using Takumi: docs-new-layout/.c15t → /docs-new-layout/public/og
// these numbers are from the run I did for the tweet
// ⏱️ 1214ms total (1s) ~59x faster
// 📊 52.2ms average per image ~62x faster
// 🚀 71.66 images/second ~59x faster
// When i reran it:
// ⏱️ Total time: 1214ms (1s)
// 📊 Performance: 54.06896551724138ms (54.06896551724138ms) - avg per image
// 🚀 Throughput: 69.99 images/second
import fs from 'node:fs';
import { cpus } from 'node:os';
import path from 'node:path';
import { Renderer } from '@takumi-rs/core';
import { fromJsx } from '@takumi-rs/helpers/jsx';
import ms from 'ms';
import React from 'react';
import DocsTemplate from '../src/pkgs/open-graph/docs-open-image-template';
import { loadGoogleFont } from '../src/pkgs/open-graph/utils/load-fonts';
// Make React available globally for fromJsx
globalThis.React = React;
// Hardcoded paths for simplicity
const SRC_DIR = path.resolve(process.cwd(), '.c15t'); // where your .mdx live
const OUT_DIR =
process.env.OG_OUT_DIR || path.resolve(process.cwd(), 'public', 'og'); // where og images will be written
// Regex patterns defined at top level for performance
const FRONTMATTER_REGEX = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/; // [1] = frontmatter body, [2] = document body
const TITLE_REGEX = /^title:\s*(.+)$/m; // frontmatter `title:` line
const DESC_REGEX = /^description:\s*(.+)$/m; // frontmatter `description:` line
const QUOTE_REGEX = /['"]/g; // quotes stripped from frontmatter values
const MDX_EXT_REGEX = /\.mdx$/; // trailing .mdx extension
const WIN_PATH_REGEX = /\\/g; // backslash separators (Windows paths)
const INDEX_REGEX = /\/index$/; // trailing /index segment in URLs
// Image metadata
// OG card dimensions in pixels.
const imageSize = {
width: 1200,
height: 630,
};
// Constants for calculations
const MILLISECONDS_PER_SECOND = 1000;
// Concurrency configuration (simpler approach like convert-mdx-to-md.ts)
const DECIMAL_RADIX = 10; // base-10 radix for Number.parseInt on the env var
const MAX_CONCURRENCY_CAP = 8; // upper bound on parallel renders
/**
 * Create a directory (and any missing parents) if it is not already present.
 */
function ensureDir(dir: string): void {
  if (fs.existsSync(dir)) {
    return;
  }
  fs.mkdirSync(dir, { recursive: true });
}
/**
 * Depth-first generator yielding every .mdx file beneath `dir`.
 * Yields nothing when the directory does not exist.
 */
function* walkMdxFiles(dir: string): Generator<string> {
  if (!fs.existsSync(dir)) {
    return;
  }
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walkMdxFiles(entryPath);
      continue;
    }
    if (entry.isFile() && entryPath.endsWith('.mdx')) {
      yield entryPath;
    }
  }
}
/**
 * Read an MDX file and pull out its frontmatter title/description plus the
 * docs URL derived from its location under SRC_DIR.
 *
 * @param filePath - Absolute path to the .mdx source file.
 * @returns Metadata for the OG template, or null when the file cannot be read.
 */
function processMdxFile(filePath: string): {
  title: string;
  description?: string;
  url: string;
} | null {
  try {
    const raw = fs.readFileSync(filePath, 'utf8');
    let title = '';
    let description = '';
    // Naive frontmatter parsing: grab the block between the leading --- fences.
    const fmMatch = raw.match(FRONTMATTER_REGEX);
    if (fmMatch) {
      const frontmatter = fmMatch[1];
      title =
        frontmatter.match(TITLE_REGEX)?.[1].replace(QUOTE_REGEX, '').trim() ??
        '';
      description =
        frontmatter.match(DESC_REGEX)?.[1].replace(QUOTE_REGEX, '').trim() ??
        '';
    }
    // Derive the page URL from the file's path relative to the source root.
    const urlPath = path
      .relative(SRC_DIR, filePath)
      .replace(MDX_EXT_REGEX, '')
      .replace(WIN_PATH_REGEX, '/') // Convert Windows paths
      .replace(INDEX_REGEX, ''); // Remove index from URLs
    return {
      // Fall back to the file name when no frontmatter title was found.
      title: title || path.basename(filePath, '.mdx'),
      description: description || undefined,
      url: `/docs/${urlPath}`,
    };
  } catch (error) {
    process.stderr.write(`Failed to process ${filePath}: ${error}\n`);
    return null;
  }
}
/**
 * Map an MDX source file to its mirrored .png destination under OUT_DIR.
 */
function getOutputPath(mdxFile: string): string {
  const relative = path.relative(SRC_DIR, mdxFile);
  const pngName = relative.replace(MDX_EXT_REGEX, '.png');
  return path.join(OUT_DIR, pngName);
}
/**
 * Recursively fix font weight values in a Takumi node tree.
 * Styles coming out of JSX often carry string weights ('700', 'bold');
 * the Takumi renderer expects numbers, so normalize them in place.
 */
const WEIGHT_MAP: Record<string, number> = {
  '100': 100,
  '200': 200,
  '300': 300,
  '400': 400,
  '500': 500,
  '600': 600,
  '700': 700,
  '800': 800,
  '900': 900,
  normal: 400,
  bold: 700,
};
function fixFontWeights(node: unknown): unknown {
  // Primitives (strings, numbers, null/undefined) carry no style to rewrite.
  if (!node || typeof node !== 'object') {
    return node;
  }
  const record = node as Record<string, unknown>;
  const style = record.style as Record<string, unknown> | undefined;
  const rawWeight = style?.fontWeight;
  if (style && typeof rawWeight === 'string' && rawWeight in WEIGHT_MAP) {
    style.fontWeight = WEIGHT_MAP[rawWeight];
  }
  const children = record.children;
  if (Array.isArray(children)) {
    record.children = children.map((child) => fixFontWeights(child));
  }
  return node;
}
// Global font cache to avoid reloading fonts for each image
let globalFonts: ArrayBuffer[] | null = null;
/**
 * Download the Geist weights plus Geist Mono in parallel and cache them.
 */
async function loadFonts(): Promise<ArrayBuffer[]> {
  if (globalFonts !== null) {
    return globalFonts;
  }
  process.stdout.write('Loading fonts...\n');
  const downloads = [
    loadGoogleFont('Geist', '900'),
    loadGoogleFont('Geist', '700'),
    loadGoogleFont('Geist', '600'),
    loadGoogleFont('Geist', '500'),
    loadGoogleFont('Geist', '400'),
    loadGoogleFont('Geist+Mono', '400'),
  ];
  globalFonts = await Promise.all(downloads);
  process.stdout.write('✓ Fonts loaded\n');
  return globalFonts;
}
/**
* Generate OG image using Takumi
*/
async function generateOGImage(
fonts: ArrayBuffer[],
title?: string,
description?: string
): Promise<Buffer> {
// Create the JSX element
const jsxElement = React.createElement(DocsTemplate, {
data: { title, description },
debug: false,
theme: 'light',
});
// Convert JSX to Takumi node
let node = await fromJsx(jsxElement);
// Fix font weights (convert strings to numbers)
// biome-ignore lint/suspicious/noExplicitAny: Required for Takumi node manipulation
node = fixFontWeights(node) as any;
// Create renderer with fonts
const renderer = new Renderer({
fonts,
persistentImages: [],
});
// Render the image
const imageBuffer = await renderer.renderAsync(node, {
width: imageSize.width,
height: imageSize.height,
});
return imageBuffer;
}
// Concurrency: honour OG_GENERATION_CONCURRENCY when it parses to a positive
// integer, otherwise cap the machine's CPU count at MAX_CONCURRENCY_CAP.
const getConcurrency = (): number => {
  const fromEnv = process.env.OG_GENERATION_CONCURRENCY;
  if (fromEnv) {
    const requested = Number.parseInt(fromEnv, DECIMAL_RADIX);
    if (Number.isFinite(requested) && requested > 0) {
      return requested;
    }
  }
  return Math.max(1, Math.min(MAX_CONCURRENCY_CAP, cpus().length));
};
const CONCURRENCY = getConcurrency();
/**
 * Process a single MDX file with timing.
 *
 * Fix: the success log printed the literal text "$(unknown)" and left the
 * computed `filename` unused; it now interpolates the generated file name.
 *
 * @returns success flag, the page title when known, and wall-clock latency (ms).
 */
async function processFileWithTiming(
  fonts: ArrayBuffer[],
  mdxFilePath: string
): Promise<{ success: boolean; title?: string; latency: number }> {
  const startTime = Date.now();
  try {
    // Process MDX file to get metadata
    const metadata = processMdxFile(mdxFilePath);
    if (!metadata) {
      return { success: false, latency: Date.now() - startTime };
    }
    // Mirror the source tree under OUT_DIR
    const outputPath = getOutputPath(mdxFilePath);
    ensureDir(path.dirname(outputPath));
    // Generate the OG image
    const imageBuffer = await generateOGImage(
      fonts,
      metadata.title,
      metadata.description
    );
    // Write the image file
    fs.writeFileSync(outputPath, imageBuffer);
    const latency = Date.now() - startTime;
    const filename = path.basename(outputPath);
    process.stdout.write(
      `✓ Generated OG image: ${filename} (${metadata.title})\n`
    );
    return { success: true, title: metadata.title, latency };
  } catch (fileError) {
    const latency = Date.now() - startTime;
    process.stderr.write(`Failed to process ${mdxFilePath}: ${fileError}\n`);
    return { success: false, latency };
  }
}
/**
 * Convert all MDX files to OG images with performance metrics
 *
 * Walks SRC_DIR for .mdx files, renders each to a PNG under OUT_DIR in
 * batches of CONCURRENCY, then prints timing and throughput statistics.
 */
async function generateAllOGImages(): Promise<void> {
try {
if (!fs.existsSync(SRC_DIR)) {
process.stdout.write(`Source directory not found: ${SRC_DIR}\n`);
return;
}
// Ensure output directory exists
ensureDir(OUT_DIR);
// Load fonts once at the beginning
const fonts = await loadFonts();
// Gather all MDX files first
const mdxFiles: string[] = [];
for (const filePath of walkMdxFiles(SRC_DIR)) {
mdxFiles.push(filePath);
}
if (mdxFiles.length === 0) {
process.stdout.write('No MDX files found to process.\n');
return;
}
let successCount = 0;
let errorCount = 0;
const latencies: number[] = [];
process.stdout.write(
`Generating ${mdxFiles.length} OG images using ${CONCURRENCY} concurrent processes (Takumi)...\n`
);
const overallStart = Date.now();
// Process files in chunks: awaiting each batch bounds concurrency at CONCURRENCY.
for (let i = 0; i < mdxFiles.length; i += CONCURRENCY) {
const chunk = mdxFiles.slice(i, i + CONCURRENCY);
const results = await Promise.all(
chunk.map((mdxFilePath) => processFileWithTiming(fonts, mdxFilePath))
);
// Count results and collect latencies
for (const result of results) {
latencies.push(result.latency);
if (result.success) {
successCount++;
} else {
errorCount++;
}
}
// Show progress
const processed = Math.min(i + CONCURRENCY, mdxFiles.length);
process.stdout.write(
`Progress: ${processed}/${mdxFiles.length} files processed\n`
);
}
const totalTime = Date.now() - overallStart;
// Mean per-file latency across all processed files (0 when none ran).
const avgLatency =
latencies.length > 0
? latencies.reduce((a, b) => a + b, 0) / latencies.length
: 0;
process.stdout.write(
`\n✓ Generated ${successCount} OG images using Takumi: ${SRC_DIR} → ${OUT_DIR}\n`
);
process.stdout.write(`⏱️ Total time: ${totalTime}ms (${ms(totalTime)})\n`);
process.stdout.write(
`📊 Performance: ${avgLatency}ms (${ms(avgLatency)}) - avg per image\n`
);
process.stdout.write(
`🚀 Throughput: ${((successCount / totalTime) * MILLISECONDS_PER_SECOND).toFixed(2)} images/second\n`
);
if (errorCount > 0) {
process.stdout.write(`⚠️ ${errorCount} files had errors\n`);
}
} catch (error) {
process.stderr.write(`Failed to generate OG images: ${error}\n`);
throw error;
}
}
// Run the generation if this file is executed directly
// (module URL matches the CLI entry path passed to node).
if (import.meta.url === `file://${process.argv[1]}`) {
generateAllOGImages().catch((error) => {
process.stderr.write(`OG image generation failed: ${error}\n`);
process.exit(1);
});
}
export { generateAllOGImages };
@BurnedChris
Copy link
Author

How the rendered images look

We have quite a complex OG image to render, and Takumi does not yet have the same level of CSS support as Satori.

Takumi Render:
quickstart

Satori Render:
quickstart

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment