From 221435d56866a6784918f21a2f0226fdad77ae28 Mon Sep 17 00:00:00 2001
From: Timothee Guerin
Date: Thu, 29 Jan 2026 09:45:23 -0500
Subject: [PATCH 1/4] Benchmark tool

---
 .../src/core/cli/actions/compile/compile.ts   |  10 +-
 packages/compiler/src/core/linter.ts          |   8 +-
 packages/compiler/src/core/perf.ts            |  63 +++++++++
 packages/compiler/src/core/program.ts         |  36 ++++--
 packages/compiler/src/core/stats.ts           |  32 +----
 packages/compiler/src/core/types.ts           |  49 +++++++
 packages/compiler/src/utils/index.ts          |   1 +
 packages/compiler/test/typekit/utils.ts       |   2 +
 packages/openapi3/src/openapi.ts              |  22 +++-
 website/src/content/current-sidebar.ts        |   1 +
 .../performance-reporting.md                  | 120 ++++++++++++++++++
 11 files changed, 293 insertions(+), 51 deletions(-)
 create mode 100644 packages/compiler/src/core/perf.ts
 create mode 100644 website/src/content/docs/docs/extending-typespec/performance-reporting.md

diff --git a/packages/compiler/src/core/cli/actions/compile/compile.ts b/packages/compiler/src/core/cli/actions/compile/compile.ts
index b7ab64d8cf7..e8412be94b7 100644
--- a/packages/compiler/src/core/cli/actions/compile/compile.ts
+++ b/packages/compiler/src/core/cli/actions/compile/compile.ts
@@ -212,18 +212,20 @@ function printStats(stats: Stats) {
     printRuntime(stats, "checker", Performance.stage, 4);
     printGroup(stats, "validation", "validators", Performance.validator, 4);
     printGroup(stats, "linter", "rules", Performance.lintingRule, 4);
-    printGroup(stats, "emit", "emitters", Performance.stage, 4);
+    for (const emitter of Object.keys(stats.emit.emitters).sort()) {
+      printGroup(stats.emit.emitters, emitter, "steps", Performance.stage, 6);
+    }
   }

-  function printGroup<K extends keyof RuntimeStats, L extends string>(
-    base: RuntimeStats,
+  function printGroup<B, K extends keyof B, L extends string>(
+    base: B,
     groupName: K,
     itemsKey: L,
     perf: readonly [number, number],
     indent: number = 0,
   ) {
     const group: any = base[groupName];
-    printKV(groupName, runtimeStr(group["total"] ?? 0), indent);
+    printKV(groupName as any, runtimeStr(group["total"] ?? 0), indent);
     for (const [key, value] of Object.entries(group[itemsKey]).sort((a, b) =>
       a[0].localeCompare(b[0]),
     )) {
diff --git a/packages/compiler/src/core/linter.ts b/packages/compiler/src/core/linter.ts
index 3d6c9741db4..586f00efd08 100644
--- a/packages/compiler/src/core/linter.ts
+++ b/packages/compiler/src/core/linter.ts
@@ -5,9 +5,9 @@ import { defineLinter } from "./library.js";
 import { createUnusedTemplateParameterLinterRule } from "./linter-rules/unused-template-parameter.rule.js";
 import { createUnusedUsingLinterRule } from "./linter-rules/unused-using.rule.js";
 import { createDiagnostic } from "./messages.js";
+import { perf } from "./perf.js";
 import type { Program } from "./program.js";
 import { EventEmitter, mapEventEmitterToNodeListener, navigateProgram } from "./semantic-walker.js";
-import { startTimer } from "./stats.js";
 import {
   Diagnostic,
   DiagnosticMessages,
@@ -197,17 +197,17 @@ export function createLinter(
       [...filteredRules.keys()].map((x) => `  - ${x}`).join("\n"),
     );

-    const timer = startTimer();
+    const timer = perf.startTimer();
     const exitCallbacks = [];
     const EXIT_EVENT_NAME = "exit";
     const allPromises: Promise<any>[] = [];
     for (const rule of filteredRules.values()) {
-      const createTiming = startTimer();
+      const createTiming = perf.startTimer();
       const listener = rule.create(createLinterRuleContext(program, rule, diagnostics));
       stats.runtime.rules[rule.id] = createTiming.end();
       for (const [name, cb] of Object.entries(listener)) {
         const timedCb = (...args: any[]) => {
-          const timer = startTimer();
+          const timer = perf.startTimer();
           const result = (cb as any)(...args);
           if (name === EXIT_EVENT_NAME && isPromise(result)) {
             compilerAssert(
diff --git a/packages/compiler/src/core/perf.ts b/packages/compiler/src/core/perf.ts
new file mode 100644
index 00000000000..ca0de8f42ad
--- /dev/null
+++ b/packages/compiler/src/core/perf.ts
@@ -0,0 +1,63 @@
+import type { PerfReporter, Timer } from "./types.js";
+
+export function startTimer(): Timer {
+  const start = performance.now();
+  return {
+    end: () => {
+      return performance.now() - start;
+    },
+  };
+}
+
+export function time(fn: () => void): number {
+  const timer = startTimer();
+  fn();
+  return timer.end();
+}
+
+export async function timeAsync(fn: () => Promise<void>): Promise<number> {
+  const timer = startTimer();
+  await fn();
+  return timer.end();
+}
+
+/** Perf utils */
+export const perf = {
+  startTimer,
+  time,
+  timeAsync,
+};
+
+export function createPerfReporter(): PerfReporter {
+  const measures: Record<string, number> = {};
+  function startReportingTimer(label: string): Timer {
+    const timer = startTimer();
+    return {
+      end: () => {
+        const time = timer.end();
+        measures[label] = time;
+        return time;
+      },
+    };
+  }
+
+  return {
+    startTimer: startReportingTimer,
+    time: <T>(label: string, fn: () => T): T => {
+      const timer = startReportingTimer(label);
+      const result = fn();
+      timer.end();
+      return result;
+    },
+    timeAsync: async <T>(label: string, fn: () => Promise<T>): Promise<T> => {
+      const timer = startReportingTimer(label);
+      const result = await fn();
+      timer.end();
+      return result;
+    },
+    report: (label: string, duration: number) => {
+      measures[label] = duration;
+    },
+    measures,
+  };
+}
diff --git a/packages/compiler/src/core/program.ts b/packages/compiler/src/core/program.ts
index d5038ab0343..40d0f49fd6f 100644
--- a/packages/compiler/src/core/program.ts
+++ b/packages/compiler/src/core/program.ts
@@ -34,6 +34,7 @@ import { Numeric } from "./numeric.js";
 import { CompilerOptions } from "./options.js";
 import { parse, parseStandaloneTypeReference } from "./parser.js";
 import { getDirectoryPath, joinPaths, resolvePath } from "./path-utils.js";
+import { createPerfReporter, perf } from "./perf.js";
 import {
   SourceLoader,
   SourceResolution,
@@ -42,7 +43,7 @@ import {
   moduleResolutionErrorToDiagnostic,
 } from "./source-loader.js";
 import { createStateAccessors } from "./state-accessors.js";
-import { ComplexityStats, RuntimeStats, Stats, startTimer, time, timeAsync } from "./stats.js";
+import { ComplexityStats, RuntimeStats, Stats } from "./stats.js";
 import {
   CompilerHost,
   Diagnostic,
@@ -61,6 +62,7 @@ import {
   Namespace,
   NoTarget,
   Node,
+  PerfReporter,
   SourceFile,
   Sym,
   SymbolFlags,
@@ -186,15 +188,15 @@ export async function compile(
     total: 0,
     emitters: {},
   };
-  const timer = startTimer();
+  const timer = perf.startTimer();
   // Emitter stage
   for (const emitter of program.emitters) {
     // If in dry mode run and an emitter doesn't support it we have to skip it.
     if (program.compilerOptions.dryRun && !emitter.library.definition?.capabilities?.dryRun) {
       continue;
     }
-    const { duration } = await emit(emitter, program);
-    emitStats.emitters[emitter.metadata.name ?? ""] = duration;
+    const emitterStats = await emit(emitter, program);
+    emitStats.emitters[emitter.metadata.name ?? ""] = emitterStats;
     if (options.listFiles) {
       logEmittedFilesPath(host.logSink);
     }
@@ -278,7 +280,7 @@ async function createProgram(
   const basedir = getDirectoryPath(resolvedMain) || "/";
   await checkForCompilerVersionMismatch(basedir);

-  runtimeStats.loader = await timeAsync(() => loadSources(resolvedMain));
+  runtimeStats.loader = await perf.timeAsync(() => loadSources(resolvedMain));

   const emit = options.noEmit ? [] : (options.emit ?? []);
   const emitterOptions = options.options;
@@ -297,7 +299,7 @@ async function createProgram(
   oldProgram = undefined;

   const resolver = (program.resolver = createResolver(program));
-  runtimeStats.resolver = time(() => resolver.resolveProgram());
+  runtimeStats.resolver = perf.time(() => resolver.resolveProgram());

   const linter = createLinter(program, (name) => loadLibrary(basedir, name));
   linter.registerLinterLibrary(builtInLinterLibraryName, createBuiltInLinterLibrary());
@@ -306,7 +308,7 @@ async function createProgram(
   }

   program.checker = createChecker(program, resolver);
-  runtimeStats.checker = time(() => program.checker.checkProgram());
+  runtimeStats.checker = perf.time(() => program.checker.checkProgram());

   complexityStats.createdTypes = program.checker.stats.createdTypes;
   complexityStats.finishedTypes = program.checker.stats.finishedTypes;
@@ -634,12 +636,12 @@ async function createProgram(
   }

   async function runValidators() {
-    const start = startTimer();
+    const start = perf.startTimer();
     runtimeStats.validation = { total: 0, validators: {} };
     runCompilerValidators();
     runtimeStats.validation.validators.compiler = start.end();
     for (const validator of validateCbs) {
-      const start = startTimer();
+      const start = perf.startTimer();
       const diagnostics = await runValidator(validator);
       if (diagnostics && Array.isArray(diagnostics)) {
         program.reportDiagnostics(diagnostics);
@@ -972,7 +974,10 @@ function resolveOptions(options: CompilerOptions): CompilerOptions {
   return { ...options };
 }

-async function emit(emitter: EmitterRef, program: Program): Promise<{ duration: number }> {
+async function emit(
+  emitter: EmitterRef,
+  program: Program,
+): Promise<{ total: number; steps: Record<string, number> }> {
   const emitterName = emitter.metadata.name ?? "";
""; const relativePathForEmittedFiles = transformPathForSink(program.host.logSink, emitter.emitterOutputDir) + "/"; @@ -981,8 +986,8 @@ async function emit(emitter: EmitterRef, program: Program): Promise<{ duration: const warnCount = program.diagnostics.filter((x) => x.severity === "warning").length; const logger = createLogger({ sink: program.host.logSink }); return await logger.trackAction(`Running ${emitterName}...`, "", async (task) => { - const start = startTimer(); - await runEmitter(emitter, program); + const start = perf.startTimer(); + const emitterPerfReporter = await runEmitter(emitter, program); const duration = start.end(); const message = `${emitterName} ${pc.green(`${Math.round(duration)}ms`)} ${pc.dim(relativePathForEmittedFiles)}`; const newErrorCount = program.diagnostics.filter((x) => x.severity === "error").length; @@ -994,21 +999,24 @@ async function emit(emitter: EmitterRef, program: Program): Promise<{ duration: } else { task.succeed(message); } - return { duration }; + return { total: duration, steps: emitterPerfReporter.measures }; }); } /** * @param emitter Emitter ref to run */ -async function runEmitter(emitter: EmitterRef, program: Program) { +async function runEmitter(emitter: EmitterRef, program: Program): Promise { + const perfReporter = createPerfReporter(); const context: EmitContext = { program, emitterOutputDir: emitter.emitterOutputDir, options: emitter.options, + perf: perfReporter, }; try { await emitter.emitFunction(context); + return perfReporter; } catch (error: unknown) { throw new ExternalError({ kind: "emitter", metadata: emitter.metadata, error }); } diff --git a/packages/compiler/src/core/stats.ts b/packages/compiler/src/core/stats.ts index 936339ad103..d62d4678876 100644 --- a/packages/compiler/src/core/stats.ts +++ b/packages/compiler/src/core/stats.ts @@ -28,32 +28,12 @@ export interface RuntimeStats { emit: { total: number; emitters: { - [rule: string]: number; + [emitterName: string]: { + total: number; + steps: { + [stepName: string]: number; + }; + }; }; }; } - -export interface Timer { - end: () => number; -} - -export function startTimer(): Timer { - const start = performance.now(); - return { - end: () => { - return performance.now() - start; - }, - }; -} - -export function time(fn: () => void): number { - const timer = startTimer(); - fn(); - return timer.end(); -} - -export async function timeAsync(fn: () => Promise): Promise { - const timer = startTimer(); - await fn(); - return timer.end(); -} diff --git a/packages/compiler/src/core/types.ts b/packages/compiler/src/core/types.ts index 94d439ff56c..1a8cb3b9a4c 100644 --- a/packages/compiler/src/core/types.ts +++ b/packages/compiler/src/core/types.ts @@ -2525,6 +2525,55 @@ export interface EmitContext> { * Emitter custom options defined in createTypeSpecLibrary */ options: TOptions; + + /** + * Performance measurement utilities. + * Use this to report performance of areas of your emitter. + * The information will be displayed when the compiler is run with `--stats` flag. + */ + readonly perf: PerfReporter; +} + +export interface Timer { + end: () => number; +} + +export interface PerfReporter { + /** + * Start timer for the given label. + * + * @example + * ```ts + * const timer = emitContext.perf.startTimer("my-emitter-task"); + * // ... do work + * const elapsed = timer.end(); // my-emitter-task automatically reported to the compiler + * ``` + */ + startTimer(label: string): Timer; + /** Report a sync function elapsed time. 
+  time<T>(label: string, callback: () => T): T;
+  /** Report an async function elapsed time. */
+  timeAsync<T>(label: string, callback: () => Promise<T>): Promise<T>;
+
+  /**
+   * Report a custom elapsed time for the given label.
+   * Can be used with {@link import("./perf.js").perf}
+   * @example
+   * ```ts
+   * import { perf } from "@typespec/compiler/utils";
+   *
+   * // somewhere in your emitter
+   * const timer = perf.startTimer();
+   * await doSomething();
+   * const elapsed = timer.end();
+   *
+   * emitContext.perf.report("doSomething", elapsed);
+   * ```
+   */
+  report(label: string, milliseconds: number): void;
+
+  /** @internal */
+  readonly measures: Readonly<Record<string, number>>;
 }

 export type LogLevel = "trace" | "warning" | "error";
diff --git a/packages/compiler/src/utils/index.ts b/packages/compiler/src/utils/index.ts
index 2af3b409d38..4e0ab94c97a 100644
--- a/packages/compiler/src/utils/index.ts
+++ b/packages/compiler/src/utils/index.ts
@@ -2,6 +2,7 @@
 // Exports for `@typespec/compiler/utils`.
 // Be explicit about what get exported so we don't export utils that are not meant to be public.
 // ---------------------------------------
+export { createPerfReporter, perf } from "../core/perf.js";
 export { DuplicateTracker } from "./duplicate-tracker.js";
 export { Queue, TwoLevelMap, createRekeyableMap, deepClone, deepEquals } from "./misc.js";
 export { useStateMap, useStateSet } from "./state-accessor.js";
diff --git a/packages/compiler/test/typekit/utils.ts b/packages/compiler/test/typekit/utils.ts
index a89a8f78889..67e78b6108a 100644
--- a/packages/compiler/test/typekit/utils.ts
+++ b/packages/compiler/test/typekit/utils.ts
@@ -1,3 +1,4 @@
+import { createPerfReporter } from "../../src/core/perf.js";
 import type { Program } from "../../src/core/program.js";
 import type { EmitContext, Type } from "../../src/core/types.js";
 import { createTestHost } from "../../src/testing/test-host.js";
@@ -15,6 +16,7 @@ export async function createContextMock(program?: Program): Promise<EmitContext>
   return {
     program: program ?? (await createTestHost()).program,
     emitterOutputDir: "tsp-output",
     options: {},
+    perf: createPerfReporter(),
   };
 }
diff --git a/packages/openapi3/src/openapi.ts b/packages/openapi3/src/openapi.ts
--- a/packages/openapi3/src/openapi.ts
+++ b/packages/openapi3/src/openapi.ts
@@ -30,0 +31 @@
+import { perf } from "@typespec/compiler/utils";
@@ -150,7 +151,10 @@
 export async function $onEmit(context: EmitContext<OpenAPI3EmitterOptions>) {
   const options = resolveOptions(context);
   for (const specVersion of options.openapiVersions) {
     const emitter = createOAPIEmitter(context, options, specVersion);
-    await emitter.emitOpenAPI();
+    const { perf } = await emitter.emitOpenAPI();
+    for (const [key, duration] of Object.entries(perf)) {
+      context.perf.report(key, duration);
+    }
   }
 }
@@ -170,6 +174,7 @@ export async function getOpenAPI3(
 
     emitterOutputDir: "tsp-output",
     options: options,
+    perf: createPerfReporter(),
   };
 
   const resolvedOptions = resolveOptions(context);
@@ -340,7 +345,11 @@ function createOAPIEmitter(
   }
 
   async function emitOpenAPI() {
+    const computeTimer = perf.startTimer();
+
     const services = await getOpenAPI();
+    const computeTime = computeTimer.end();
+
     // first, emit diagnostics
     for (const serviceRecord of services) {
       if (serviceRecord.versioned) {
@@ -353,11 +362,11 @@
     }
 
     if (program.compilerOptions.dryRun || program.hasError()) {
-      return;
+      return { perf: { compute: computeTime } };
     }
 
     const multipleService = services.length > 1;
-
+    const writeTimer = perf.startTimer();
     for (const serviceRecord of services) {
       if (serviceRecord.versioned) {
         for (const documentRecord of serviceRecord.versions) {
@@ -375,6 +384,13 @@
         });
       }
     }
+    const writeTime = writeTimer.end();
+    return {
+      perf: {
+        compute: computeTime,
+        write: writeTime,
+      },
+    };
   }
 
   function initializeEmitter(
diff --git a/website/src/content/current-sidebar.ts b/website/src/content/current-sidebar.ts
index 6da32097370..710fade3f4f 100644
--- 
a/website/src/content/current-sidebar.ts
+++ b/website/src/content/current-sidebar.ts
@@ -241,6 +241,7 @@ const sidebar: SidebarItem[] = [
       "extending-typespec/testing",
       "extending-typespec/emitters-basics",
       "extending-typespec/emitter-framework",
+      "extending-typespec/performance-reporting",
       "extending-typespec/emitter-metadata-handling",
       "extending-typespec/writing-scaffolding-template",
     ],
diff --git a/website/src/content/docs/docs/extending-typespec/performance-reporting.md b/website/src/content/docs/docs/extending-typespec/performance-reporting.md
new file mode 100644
index 00000000000..203b056c11d
--- /dev/null
+++ b/website/src/content/docs/docs/extending-typespec/performance-reporting.md
@@ -0,0 +1,120 @@
+---
+title: Performance reporting
+---
+
+# Performance reporting
+
+TypeSpec compiler can report performance statistics after a compilation. This can be enabled by passing `--stats` to the CLI.
+
+```ansi frame="terminal"
+tsp compile . --stats
+TypeSpec compiler v1.9.0
+
+Compilation completed successfully.
+
+Compiler statistics:
+  Complexity:
+    Created types: 494
+    Finished types: 319
+  Performance:
+    loader: 19ms
+    resolver: 6ms
+    checker: 8ms
+    validation: 0ms
+      compiler: 0ms
+    linter: 0ms
+```
+
+Since TypeSpec 1.9.0 emitters can now also report their own performance statistics which will then be displayed in the same report.
+
+Reporting performance in an emitter can be done using the `EmitContext.perf` API. There is a few approaches depending on the use case:
+
+## 1. `startTimer`
+
+This approach is useful when you want to start a timer at one point in your code and stop it later.
+
+```ts
+const timer = context.perf.startTimer("my-task");
+// ... do some work
+timer.stop();
+```
+
+## 2. `time`
+
+This approach is useful when you have a synchronous function and want to measure its execution time.
+
+```ts
+context.perf.time("my-task", () => {
+  // ... do some work
+});
+```
+
+## 3. `timeAsync`
+
+This approach is useful when you have an asynchronous function and want to measure its execution time.
+
+```ts
+await context.perf.timeAsync("my-task", async () => {
+  // ... do some work
+});
+```
+
+## 4. `reportTime`
+
+This approach is useful when you already have the duration of a task and want to report it directly.
+You can then use the `perf` utilities to measure the duration in that task.
+
+```ts title=emit.ts
+const { duration } = runTask();
+context.perf.reportTime("my-task", durationInMs);
+```
+
+```ts title=task-runner.ts
+import { perf } from "@typespec/compiler/utils";
+
+function runTask(): { duration: number } {
+  const timer = perf.startTimer();
+  // ... do some work
+  return { duration: timer.end() };
+}
+```
+
+## Example
+
+```ts
+export function $onEmit(context: EmitContext) {
+  const timer = context.perf.startTimer("prepare");
+  prepare();
+  timer.stop();
+
+  const renderResult = context.perf.time("render", () => render());
+
+  context.perf.timeAsync("write", () => writeOutput(renderResult));
+}
+```
+
+Which would result in the following
+
+```ansi frame="terminal"
+tsp compile . --stats
+TypeSpec compiler v1.9.0
+
+Compilation completed successfully.
+
+Compiler statistics:
+  Complexity:
+    Created types: 494
+    Finished types: 319
+  Performance:
+    loader: 19ms
+    resolver: 6ms
+    checker: 8ms
+    validation: 0ms
+      compiler: 0ms
+    linter: 0ms
+    emit: 128ms
+      my-emitter: 128ms
+        prepare: 39ms
+        render: 28ms
+        write: 51ms
+```

From 1d41ed7dcea416b2c836cc4ce4fa5f484ff8227d Mon Sep 17 00:00:00 2001
From: Timothee Guerin
Date: Thu, 29 Jan 2026 06:51:51 -0800
Subject: [PATCH 2/4] Create feature-benchmark-tool-2026-0-29-14-46-47.md

---
 .chronus/changes/feature-benchmark-tool-2026-0-29-14-46-47.md | 8 ++++++++
 1 file changed, 8 insertions(+)
 create mode 100644 .chronus/changes/feature-benchmark-tool-2026-0-29-14-46-47.md

diff --git a/.chronus/changes/feature-benchmark-tool-2026-0-29-14-46-47.md b/.chronus/changes/feature-benchmark-tool-2026-0-29-14-46-47.md
new file mode 100644
index 00000000000..4e38a89915b
--- /dev/null
+++ b/.chronus/changes/feature-benchmark-tool-2026-0-29-14-46-47.md
@@ -0,0 +1,8 @@
+---
+# Change versionKind to one of: internal, fix, dependencies, feature, deprecation, breaking
+changeKind: feature
+packages:
+  - "@typespec/compiler"
+---
+
+[API] Add performance reporting utilities for emitters [See docs for more info](https://typespec.io/docs/extending-typespec/performance-reporting/)

From 9728217519a55d6bc05e9a9df542a1646813999c Mon Sep 17 00:00:00 2001
From: Timothee Guerin
Date: Thu, 29 Jan 2026 06:57:54 -0800
Subject: [PATCH 3/4] Create feature-benchmark-tool-2026-0-29-14-52-37.md

---
 .chronus/changes/feature-benchmark-tool-2026-0-29-14-52-37.md | 8 ++++++++
 1 file changed, 8 insertions(+)
 create mode 100644 .chronus/changes/feature-benchmark-tool-2026-0-29-14-52-37.md

diff --git a/.chronus/changes/feature-benchmark-tool-2026-0-29-14-52-37.md b/.chronus/changes/feature-benchmark-tool-2026-0-29-14-52-37.md
new file mode 100644
index 00000000000..af50b57571d
--- /dev/null
+++ b/.chronus/changes/feature-benchmark-tool-2026-0-29-14-52-37.md
@@ -0,0 +1,8 @@
+---
+# Change versionKind to one of: internal, fix, dependencies, feature, deprecation, breaking
+changeKind: feature
+packages:
+  - "@typespec/openapi3"
+---
+
+Expose performance information when running with `--stats`

From 3be567aeb000fdf2ab170d7c669762bf89dc8812 Mon Sep 17 00:00:00 2001
From: Timothee Guerin
Date: Thu, 29 Jan 2026 10:03:53 -0500
Subject: [PATCH 4/4] improved docs

---
 .../performance-reporting.md                  | 112 +++++++++++++------
 1 file changed, 81 insertions(+), 31 deletions(-)

diff --git a/website/src/content/docs/docs/extending-typespec/performance-reporting.md b/website/src/content/docs/docs/extending-typespec/performance-reporting.md
index 203b056c11d..18f8bea1f3f 100644
--- a/website/src/content/docs/docs/extending-typespec/performance-reporting.md
+++ b/website/src/content/docs/docs/extending-typespec/performance-reporting.md
@@ -1,10 +1,14 @@
 ---
-title: Performance reporting
+title: Performance Reporting
 ---
 
-# Performance reporting
-
-TypeSpec compiler can report performance statistics after a compilation. This can be enabled by passing `--stats` to the CLI.
+The TypeSpec compiler can report performance statistics after compilation, helping you identify bottlenecks and optimize your build process. Enable this feature by passing the `--stats` flag to the CLI:
+
+```bash
+tsp compile . --stats
+```
+
+
 
 ```ansi frame="terminal"
 tsp compile . --stats
@@ -25,51 +29,77 @@
     linter: 0ms
 ```
 
-Since TypeSpec 1.9.0 emitters can now also report their own performance statistics which will then be displayed in the same report.
+The report includes:
 
+- **Complexity metrics**: Number of types created and finished during compilation
+- **Performance breakdown**: Time spent in each compilation phase (loading, resolving, type-checking, validation, and linting)
 
-## 1. `startTimer`
+## Emitter Performance Reporting
 
-This approach is useful when you want to start a timer at one point in your code and stop it later.
+:::note[Since TypeSpec 1.9.0]
+:::
 
-```ts
-const timer = context.perf.startTimer("my-task");
-// ... do some work
-timer.stop();
-```
+Emitters can report their own performance statistics, which are displayed alongside the compiler metrics in the same report.
+
+Use the `EmitContext.perf` API to instrument your emitter code. The API provides several methods depending on your use case.
 
-## 2. `time`
+### `startTimer` - Manual Timer Control
 
-This approach is useful when you have a synchronous function and want to measure its execution time.
+Best for when the start and end points are in different parts of your code, or when you need conditional timing:
 
 ```ts
-context.perf.time("my-task", () => {
+const timer = context.perf.startTimer("my-task");
+
+// ... do some work across multiple statements
+
+timer.end();
+```
+
+### `time` - Synchronous Function Timing
+
+Best for wrapping synchronous code blocks. Returns the result of the callback function:
+
+```ts
+const result = context.perf.time("my-task", () => {
   // ... do some work
+  return computedValue;
 });
 ```
 
-## 3. `timeAsync`
+### `timeAsync` - Asynchronous Function Timing
 
-This approach is useful when you have an asynchronous function and want to measure its execution time.
+Best for wrapping async operations. Returns a promise with the callback's result:
 
 ```ts
-await context.perf.timeAsync("my-task", async () => {
-  // ... do some work
+const result = await context.perf.timeAsync("my-task", async () => {
+  // ... do some async work
+  return await fetchData();
 });
 ```
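+
+Since `timeAsync` returns the callback's promise, it can also wrap steps that run in parallel; each label is measured independently (a sketch; `writeJson` and `writeYaml` are illustrative names):
+
+```ts
+// Each label gets its own timer, even though the work overlaps.
+await Promise.all([
+  context.perf.timeAsync("write-json", () => writeJson()),
+  context.perf.timeAsync("write-yaml", () => writeYaml()),
+]);
+```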
 
-## 4. `reportTime`
+### `report` - Report Pre-measured Duration
 
-This approach is useful when you already have the duration of a task and want to report it directly.
-You can then use the `perf` utilities to measure the duration in that task.
+
+Best when you already have timing data from another source (e.g., a child process or external tool):
 
-```ts title=emit.ts
+```ts title="emit.ts"
 const { duration } = runTask();
-context.perf.reportTime("my-task", durationInMs);
+context.perf.report("my-task", duration);
 ```
 
-```ts title=task-runner.ts
+You can use the standalone `perf` utilities to measure duration in code that doesn't have access to the emit context:
+
+```ts title="task-runner.ts"
 import { perf } from "@typespec/compiler/utils";
 
 function runTask(): { duration: number } {
@@ -79,21 +109,30 @@ function runTask(): { duration: number } {
 }
 ```
 
-## Example
+## Complete Example
+
+Here's how to instrument a typical emitter with multiple phases:
 
 ```ts
-export function $onEmit(context: EmitContext) {
+import { EmitContext } from "@typespec/compiler";
+
+export async function $onEmit(context: EmitContext) {
+  // Manual timer for the preparation phase
   const timer = context.perf.startTimer("prepare");
   prepare();
-  timer.stop();
+  timer.end();
 
+  // Wrap synchronous rendering with automatic timing
   const renderResult = context.perf.time("render", () => render());
 
-  context.perf.timeAsync("write", () => writeOutput(renderResult));
+  // Wrap async file writing with automatic timing
+  await context.perf.timeAsync("write", async () => writeOutput(renderResult));
 }
 ```
 
-Which would result in the following
+Running `tsp compile . --stats` with this instrumented emitter produces:
+
 
 ```ansi frame="terminal"
 tsp compile . --stats
@@ -118,3 +157,14 @@
         render: 28ms
         write: 51ms
 ```
+
+The emitter's custom metrics (`prepare`, `render`, `write`) appear nested under the emitter name, giving you a clear breakdown of where time is spent during code generation.
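+
+If your emitter also needs to run on compiler versions older than 1.9.0, where `EmitContext.perf` does not exist, guard the calls before using them (a minimal sketch):
+
+```ts
+// `context.perf` is undefined on pre-1.9.0 compilers, so use optional chaining.
+const timer = context.perf?.startTimer("render");
+render();
+timer?.end();
+```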