- {/* Header */}
-
-
- Samples
-
- {selectedIds.size > 0 ? `${selectedIds.size} selected` : `${filteredSamples.length} items`}
-
-
- {selectedIds.size > 0 && (
-
useStore.getState().setSelectedIds(new Set())}
- className="px-2 py-1 text-xs text-text-muted hover:text-text hover:bg-surface rounded transition-colors"
+
+
+
+
- Clear Selection
-
- )}
-
+ {virtualRows.map((virtualRow) => {
+ const row = rows[virtualRow.index];
+ if (!row) return null;
- {/* Grid Container */}
-
-
- {items.map((virtualRow) => {
- const rowIndex = virtualRow.index;
- const startIndex = rowIndex * columnCount;
- const rowSamples = filteredSamples.slice(startIndex, startIndex + columnCount);
-
- return (
-
- {rowSamples.map((sample) => {
- const isSelected = selectedIds.has(sample.id);
- const isHovered = hoveredId === sample.id;
-
- return (
-
handleClick(sample, e)}
- onMouseEnter={() => setHoveredId(sample.id)}
- onMouseLeave={() => setHoveredId(null)}
- >
- {/* Image */}
- {sample.thumbnail ? (
-
- ) : (
-
- No image
-
- )}
-
- {/* Label badge */}
- {sample.label && (
-
-
- {sample.label}
-
-
- )}
-
- {/* Selection indicator */}
- {isSelected && (
-
- )}
-
- );
- })}
- {/* Fill empty cells */}
- {Array.from({ length: columnCount - rowSamples.length }).map((_, i) => (
-
- ))}
-
- );
- })}
-
-
+ const rowSamples = samples.slice(row.startIndex, row.endIndex);
+ const rowBoxes = boxes.slice(row.startIndex, row.endIndex);
- {/* Instructions footer */}
-
-
- Click to select • Cmd/Ctrl+click to multi-select • Shift+click for range
-
+ return (
+
+ {rowSamples.map((sample, i) => {
+ const box = rowBoxes[i];
+ if (!box) return null;
+
+ const isSelected = isLassoSelection ? true : selectedIds.has(sample.id);
+ const isHovered = hoveredId === sample.id;
+
+ return (
+
handleClick(sample, e)}
+ onMouseEnter={() => setHoveredId(sample.id)}
+ onMouseLeave={() => setHoveredId(null)}
+ >
+ {/* Image container - justified layout sizes tile to preserve aspect ratio */}
+ {/* Future: overlays (segmentations, bboxes) will be absolutely positioned here */}
+ {sample.thumbnail ? (
+ // eslint-disable-next-line @next/next/no-img-element
+
+ ) : (
+
+ No preview
+
+ )}
+
+ {/* Label badge */}
+ {sample.label && (
+
+
+ {sample.label}
+
+
+ )}
+
+ {/* Selection indicator */}
+ {isSelected && (
+
+
+
+ )}
+
+ );
+ })}
+
+ );
+ })}
+
+
-
+
);
}
diff --git a/frontend/src/components/Panel.tsx b/frontend/src/components/Panel.tsx
new file mode 100644
index 0000000..5058fa5
--- /dev/null
+++ b/frontend/src/components/Panel.tsx
@@ -0,0 +1,43 @@
+"use client";
+
+import { ReactNode } from "react";
+import { cn } from "@/lib/utils";
+
+interface PanelProps {
+ children: ReactNode;
+ className?: string;
+}
+
+/**
+ * Base panel container with consistent Rerun-style appearance.
+ * No borders or rounded corners - panels should be flush against each other.
+ */
+export function Panel({ children, className }: PanelProps) {
+ return (
+
+ {children}
+
+ );
+}
+
+interface PanelFooterProps {
+ children: ReactNode;
+ className?: string;
+}
+
+/**
+ * Panel footer for keyboard shortcuts/hints.
+ */
+export function PanelFooter({ children, className }: PanelFooterProps) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/frontend/src/components/PanelContextBar.tsx b/frontend/src/components/PanelContextBar.tsx
new file mode 100644
index 0000000..5eb6c14
--- /dev/null
+++ b/frontend/src/components/PanelContextBar.tsx
@@ -0,0 +1,179 @@
+"use client";
+
+import { Check, ChevronDown } from "lucide-react";
+import { type ReactNode } from "react";
+
+import { cn } from "@/lib/utils";
+import { Button } from "@/components/ui/button";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuLabel,
+ DropdownMenuRadioGroup,
+ DropdownMenuRadioItem,
+ DropdownMenuSeparator,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
+
+export interface PanelContextOption {
+ value: string;
+ label: string;
+ group?: string;
+ disabled?: boolean;
+}
+
+interface PanelContextBaseItem {
+ id: string;
+ label: string;
+ showLabel?: boolean;
+ value: string;
+ placeholder?: string;
+ valueTitle?: string;
+ valueClassName?: string;
+}
+
+export interface PanelContextStaticItem extends PanelContextBaseItem {
+ kind?: "static";
+}
+
+export interface PanelContextSelectItem extends PanelContextBaseItem {
+ kind: "select";
+ options: PanelContextOption[];
+ onValueChange: (value: string) => void;
+ disabled?: boolean;
+}
+
+export type PanelContextItem = PanelContextStaticItem | PanelContextSelectItem;
+
+interface PanelContextBarProps {
+ items: PanelContextItem[];
+ rightContent?: ReactNode;
+ className?: string;
+}
+
+export function PanelContextBar({ items, rightContent, className }: PanelContextBarProps) {
+ const visibleItems = items.filter((item) => item.value.trim().length > 0 || item.kind === "select");
+
+ if (visibleItems.length === 0 && !rightContent) {
+ return null;
+ }
+
+ return (
+
+
+ {visibleItems.map((item, index) => {
+ const valueTitle = item.valueTitle ?? item.value;
+ const showLabel = item.showLabel ?? item.label.trim().length > 0;
+ const selectedLabel =
+ item.kind === "select"
+ ? item.options.find((option) => option.value === item.value)?.label
+ : undefined;
+ const displayValue = selectedLabel ?? item.value ?? item.placeholder ?? "Select";
+
+ return (
+
+ {index > 0 &&
}
+
+ {showLabel && (
+
+ {item.label}
+
+ )}
+
+ {item.kind === "select" ? (
+
+
+
+ {displayValue}
+
+
+
+
+ {item.options.some((option) => option.group) ? (
+ item.onValueChange(nextValue)}
+ >
+ {Array.from(
+ item.options.reduce((groups, option) => {
+ const groupName = option.group ?? "";
+ const existing = groups.get(groupName);
+ if (existing) {
+ existing.push(option);
+ } else {
+ groups.set(groupName, [option]);
+ }
+ return groups;
+ }, new Map())
+ ).map(([groupName, options], groupIndex, groups) => (
+
+ {groupName && (
+
+ {groupName}
+
+ )}
+ {options.map((option) => (
+
+ {option.label}
+
+ ))}
+ {groupIndex < groups.length - 1 && }
+
+ ))}
+
+ ) : (
+ item.options.map((option) => (
+ item.onValueChange(option.value)}
+ disabled={item.disabled || option.disabled}
+ >
+ {option.label}
+ {option.value === item.value && }
+
+ ))
+ )}
+
+
+ ) : (
+
+ {displayValue}
+
+ )}
+
+ );
+ })}
+
+
+ {rightContent &&
{rightContent}
}
+
+ );
+}
diff --git a/frontend/src/components/PanelHeader.tsx b/frontend/src/components/PanelHeader.tsx
new file mode 100644
index 0000000..399fc25
--- /dev/null
+++ b/frontend/src/components/PanelHeader.tsx
@@ -0,0 +1,43 @@
+"use client";
+
+import { ReactNode } from "react";
+import { cn } from "@/lib/utils";
+import { PanelTitle } from "./PanelTitle";
+
+interface PanelHeaderProps {
+ icon?: ReactNode;
+ title: string;
+ subtitle?: string;
+ children?: ReactNode; // Toolbar actions slot
+ className?: string;
+}
+
+/**
+ * Rerun-style panel header with icon, title, and optional toolbar.
+ *
+ * Design tokens (from Rerun):
+ * - Title bar height: 24px
+ * - Icon size: 14px (3.5 tailwind units)
+ * - Icon-to-text gap: 4px (gap-1)
+ * - Font size: 12px with -0.15px tracking
+ * - Section header font: 11px uppercase
+ */
+export function PanelHeader({ icon, title, subtitle, children, className }: PanelHeaderProps) {
+ return (
+
+
+
+ {subtitle && (
+
{subtitle}
+ )}
+
+ {children && (
+
{children}
+ )}
+
+ );
+}
diff --git a/frontend/src/components/PanelTitle.tsx b/frontend/src/components/PanelTitle.tsx
new file mode 100644
index 0000000..095fb39
--- /dev/null
+++ b/frontend/src/components/PanelTitle.tsx
@@ -0,0 +1,40 @@
+"use client";
+
+import { ReactNode } from "react";
+
+import { cn } from "@/lib/utils";
+
+interface PanelTitleProps {
+ title?: string;
+ icon?: ReactNode;
+ className?: string;
+ titleClassName?: string;
+ iconClassName?: string;
+ fullHeight?: boolean;
+}
+
+export function PanelTitle({
+ title,
+ icon,
+ className,
+ titleClassName,
+ iconClassName,
+ fullHeight = false,
+}: PanelTitleProps) {
+ return (
+
+ {icon && (
+
+ {icon}
+
+ )}
+ {title ?? ""}
+
+ );
+}
diff --git a/frontend/src/components/PlaceholderPanel.tsx b/frontend/src/components/PlaceholderPanel.tsx
new file mode 100644
index 0000000..bf760fa
--- /dev/null
+++ b/frontend/src/components/PlaceholderPanel.tsx
@@ -0,0 +1,39 @@
+"use client";
+
+import React from "react";
+import { HyperViewLogo } from "./icons";
+import { Panel } from "./Panel";
+import { cn } from "@/lib/utils";
+import { X } from "lucide-react";
+
+interface PlaceholderPanelProps {
+ className?: string;
+ onClose?: () => void;
+}
+
+/**
+ * Empty placeholder panel with centered HyperView logo and a close button.
+ * Used for right and bottom zones that are reserved for future features.
+ * The close button is always visible in the top-right corner of the panel content.
+ */
+export function PlaceholderPanel({ className, onClose }: PlaceholderPanelProps) {
+ return (
+
+ {/* Close button always visible in top right */}
+ {onClose && (
+
+
+
+ )}
+
+
+ );
+}
diff --git a/frontend/src/components/ScatterPanel.tsx b/frontend/src/components/ScatterPanel.tsx
index 177fea2..1091e23 100644
--- a/frontend/src/components/ScatterPanel.tsx
+++ b/frontend/src/components/ScatterPanel.tsx
@@ -1,402 +1,367 @@
"use client";
-import { useEffect, useRef, useCallback, useState } from "react";
-import { scaleLinear } from "d3-scale";
+import { useCallback, useEffect, useMemo, useState } from "react";
+import { Settings2 } from "lucide-react";
import { useStore } from "@/store/useStore";
-import type { ViewMode } from "@/types";
-
-// Color utility
-function hexToRgb(hex: string): [number, number, number] {
- const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
- if (result) {
- return [
- parseInt(result[1], 16) / 255,
- parseInt(result[2], 16) / 255,
- parseInt(result[3], 16) / 255,
- ];
- }
- return [0.5, 0.5, 0.5];
-}
-
-// Default colors for points without labels
-const DEFAULT_COLORS = [
- "#e6194b", "#3cb44b", "#ffe119", "#4363d8", "#f58231",
- "#911eb4", "#46f0f0", "#f032e6", "#bcf60c", "#fabebe",
- "#008080", "#e6beff", "#9a6324", "#fffac8", "#800000",
-];
+import { Panel } from "./Panel";
+import { Button } from "@/components/ui/button";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuLabel,
+ DropdownMenuRadioGroup,
+ DropdownMenuRadioItem,
+ DropdownMenuSeparator,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
+import {
+ PanelContextBar,
+ type PanelContextItem,
+ type PanelContextOption,
+} from "./PanelContextBar";
+import { useHyperScatter } from "./useHyperScatter";
+import { useLabelLegend } from "./useLabelLegend";
+import type { Geometry } from "@/types";
+import { findLayoutByGeometry, listAvailableGeometries } from "@/lib/layouts";
+import { fetchEmbeddings } from "@/lib/api";
interface ScatterPanelProps {
className?: string;
+ layoutKey?: string;
+ geometry?: Geometry;
}
-export function ScatterPanel({ className = "" }: ScatterPanelProps) {
- const canvasRef = useRef
(null);
- const containerRef = useRef(null);
- const svgGroupRef = useRef(null);
- const scatterplotRef = useRef(null);
- const [isInitialized, setIsInitialized] = useState(false);
-
+export function ScatterPanel({
+ className = "",
+ layoutKey,
+ geometry,
+}: ScatterPanelProps) {
const {
- embeddings,
- viewMode,
- setViewMode,
+ datasetInfo,
+ embeddingsByLayoutKey,
+ setEmbeddingsForLayout,
selectedIds,
setSelectedIds,
+ beginLassoSelection,
hoveredId,
setHoveredId,
+ setActiveLayoutKey,
+ labelFilter,
} = useStore();
- // Sync SVG transform
- const syncSvg = useCallback((event: any) => {
- const { xScale, yScale } = event;
+ const [localGeometry, setLocalGeometry] = useState("euclidean");
+ const [localLayoutKey, setLocalLayoutKey] = useState(null);
+
+ // Check which geometries are available
+ const availableGeometries = useMemo(() => {
+ return listAvailableGeometries(datasetInfo?.layouts ?? []);
+ }, [datasetInfo?.layouts]);
+
+ useEffect(() => {
+ if (geometry) return;
+ if (availableGeometries.length === 0) return;
+ if (!availableGeometries.includes(localGeometry)) {
+ setLocalGeometry(availableGeometries[0]);
+ }
+ }, [availableGeometries, geometry, localGeometry]);
- if (svgGroupRef.current && xScale && yScale) {
- // Calculate transform based on the actual scales used by the scatterplot
- // The SVG is defined in [-1, 1] coordinate space (r=1 circle at 0,0)
- // We want to map [-1, 1] to screen coordinates.
+ const resolvedGeometry = geometry ?? localGeometry;
- // xScale(0) is the screen x-coordinate of the origin
- // xScale(1) is the screen x-coordinate of x=1
- // So the scaling factor for x is xScale(1) - xScale(0)
+ const resolvedLayoutKey = useMemo(() => {
+ if (!datasetInfo) return localLayoutKey ?? layoutKey ?? null;
- const scaleX = xScale(1) - xScale(0);
- const scaleY = yScale(1) - yScale(0);
- const translateX = xScale(0);
- const translateY = yScale(0);
+ if (localLayoutKey) {
+ const exists = datasetInfo.layouts.some((layout) => layout.layout_key === localLayoutKey);
+ if (exists) return localLayoutKey;
+ }
- svgGroupRef.current.setAttribute(
- "transform",
- `matrix(${scaleX}, 0, 0, ${scaleY}, ${translateX}, ${translateY})`
- );
+ if (layoutKey) {
+ const exists = datasetInfo.layouts.some((layout) => layout.layout_key === layoutKey);
+ if (exists) return layoutKey;
}
- }, []);
- // Initialize scatterplot
+ const layout = findLayoutByGeometry(datasetInfo.layouts, resolvedGeometry);
+ return layout?.layout_key ?? datasetInfo.layouts[0]?.layout_key ?? null;
+ }, [datasetInfo, layoutKey, localLayoutKey, resolvedGeometry]);
+
useEffect(() => {
- if (!canvasRef.current || !containerRef.current || isInitialized) return;
-
- let mounted = true;
-
- const initScatterplot = async () => {
- try {
- const createScatterplot = (await import("regl-scatterplot")).default;
-
- if (!mounted || !canvasRef.current || !containerRef.current) return;
-
- const { width, height } = containerRef.current.getBoundingClientRect();
-
- // Initialize D3 scales for synchronization
- // Our data is normalized to [-1, 1]
- const xScale = scaleLinear().domain([-1, 1]);
- const yScale = scaleLinear().domain([-1, 1]);
-
- const scatterplot = createScatterplot({
- canvas: canvasRef.current,
- width,
- height,
- xScale,
- yScale,
- pointSize: 4,
- pointSizeSelected: 8,
- opacity: 0.8,
- opacityInactiveMax: 0.2,
- lassoColor: [0.31, 0.27, 0.90, 1], // Indigo primary #4F46E5
- lassoMinDelay: 10,
- lassoMinDist: 2,
- showReticle: true,
- reticleColor: [1, 1, 1, 0.5],
- colorBy: 'category',
- pointColor: DEFAULT_COLORS,
- });
-
- // Handle view changes to sync SVG
- scatterplot.subscribe("view", syncSvg);
-
- // Initial sync
- const currentXScale = scatterplot.get("xScale");
- const currentYScale = scatterplot.get("yScale");
- if (currentXScale && currentYScale) {
- syncSvg({ xScale: currentXScale, yScale: currentYScale });
- }
-
- // Handle lasso selection
- scatterplot.subscribe("select", ({ points }: { points: number[] }) => {
- if (points.length > 0) {
- const currentEmbeddings = useStore.getState().embeddings;
- if (currentEmbeddings) {
- const selectedSampleIds = new Set(
- points.map((idx) => currentEmbeddings.ids[idx])
- );
- // Mark this as a lasso selection
- useStore.getState().setSelectedIds(selectedSampleIds, true);
- }
- }
- });
-
- // Handle deselection
- scatterplot.subscribe("deselect", () => {
- useStore.getState().setSelectedIds(new Set(), false);
- });
-
- // Handle point hover
- scatterplot.subscribe(
- "pointOver",
- (pointIndex: number) => {
- const currentEmbeddings = useStore.getState().embeddings;
- if (currentEmbeddings && pointIndex >= 0) {
- setHoveredId(currentEmbeddings.ids[pointIndex]);
- }
- }
- );
+ if (!datasetInfo || !localLayoutKey) return;
+ const exists = datasetInfo.layouts.some((layout) => layout.layout_key === localLayoutKey);
+ if (!exists) {
+ setLocalLayoutKey(null);
+ }
+ }, [datasetInfo, localLayoutKey]);
- scatterplot.subscribe("pointOut", () => {
- setHoveredId(null);
- });
+ const resolvedLayout = useMemo(() => {
+ if (!datasetInfo || !resolvedLayoutKey) return null;
+ return datasetInfo.layouts.find((layout) => layout.layout_key === resolvedLayoutKey) ?? null;
+ }, [datasetInfo, resolvedLayoutKey]);
- scatterplotRef.current = scatterplot;
- setIsInitialized(true);
- } catch (error) {
- console.error("Failed to initialize scatterplot:", error);
- }
- };
+ const resolvedSpace = useMemo(() => {
+ if (!datasetInfo || !resolvedLayout) return null;
+ return datasetInfo.spaces.find((space) => space.space_key === resolvedLayout.space_key) ?? null;
+ }, [datasetInfo, resolvedLayout]);
- initScatterplot();
+ const geometryLayouts = useMemo(() => {
+ if (!datasetInfo) return [];
+ return datasetInfo.layouts.filter((layout) => layout.geometry === resolvedGeometry);
+ }, [datasetInfo, resolvedGeometry]);
- return () => {
- if (scatterplotRef.current) {
- scatterplotRef.current.destroy();
- scatterplotRef.current = null;
- setIsInitialized(false);
- }
- };
- }, [syncSvg]);
+ const modelOptions = useMemo(() => {
+ if (!datasetInfo || geometryLayouts.length === 0) return [];
- // Update data when embeddings or viewMode changes
- useEffect(() => {
- if (!scatterplotRef.current || !embeddings) return;
-
- const coords = viewMode === "euclidean" ? embeddings.euclidean : embeddings.hyperbolic;
-
- // If switching to hyperbolic, try to sync SVG immediately
- if (viewMode === "hyperbolic") {
- // Small timeout to ensure SVG is rendered
- setTimeout(() => {
- if (scatterplotRef.current) {
- const xScale = scatterplotRef.current.get("xScale");
- const yScale = scatterplotRef.current.get("yScale");
- if (xScale && yScale) {
- syncSvg({ xScale, yScale });
- }
- }
- }, 0);
- }
+ const seenSpaceKeys = new Set();
- // Build unique categories for color mapping
- // Handle nulls by converting to "undefined"
- const uniqueLabels = [...new Set(embeddings.labels.map((l) => l || "undefined"))];
-
- const labelToCategory: Record = {};
- uniqueLabels.forEach((label, idx) => {
- labelToCategory[label] = idx;
- });
+ return geometryLayouts.flatMap((layout) => {
+ if (seenSpaceKeys.has(layout.space_key)) return [];
+ seenSpaceKeys.add(layout.space_key);
- // Build category array (integer indices for each point)
- const categories = embeddings.labels.map((label) => {
- const key = label || "undefined";
- return labelToCategory[key];
- });
+ const space = datasetInfo.spaces.find((candidate) => candidate.space_key === layout.space_key);
- // Build color palette from label colors
- const colorPalette = uniqueLabels.map((label) => {
- if (label === "undefined") return "#008080"; // Dark teal for undefined
- return embeddings.label_colors[label] || "#808080";
+ return [
+ {
+ value: layout.space_key,
+ label: space?.model_id ?? layout.space_key,
+ group: space?.provider,
+ },
+ ];
});
+ }, [datasetInfo, geometryLayouts]);
- // Set the color palette first
- if (colorPalette.length > 0) {
- scatterplotRef.current.set({ pointColor: colorPalette });
- }
+ const selectedSpaceKey = resolvedLayout?.space_key ?? modelOptions[0]?.value ?? "";
- scatterplotRef.current.draw({
- x: coords.map((c) => c[0]),
- y: coords.map((c) => c[1]),
- category: categories,
- });
-
- // Reset view to fit new points
- scatterplotRef.current.reset();
-
- // Try to sync again after draw
- if (viewMode === "hyperbolic") {
- setTimeout(() => {
- if (scatterplotRef.current) {
- const xScale = scatterplotRef.current.get("xScale");
- const yScale = scatterplotRef.current.get("yScale");
- if (xScale && yScale) {
- syncSvg({ xScale, yScale });
- }
- }
- }, 100);
- }
- }, [embeddings, viewMode, isInitialized, syncSvg]);
+ const selectedProjectionMethod = resolvedLayout?.method ?? "";
- // Sync selection from store to scatterplot
- useEffect(() => {
- if (!scatterplotRef.current || !embeddings) return;
+ const selectedModelLabel =
+ modelOptions.find((option) => option.value === selectedSpaceKey)?.label ??
+ resolvedSpace?.model_id ??
+ "";
- const selectedIndices = Array.from(selectedIds)
- .map((id) => embeddings.ids.indexOf(id))
- .filter((idx) => idx !== -1);
+ const projectionMethodOptions = useMemo(() => {
+ const methodsForSelectedModel = geometryLayouts
+ .filter((layout) => layout.space_key === selectedSpaceKey)
+ .map((layout) => layout.method);
- scatterplotRef.current.select(selectedIndices, { preventEvent: true });
- }, [selectedIds, embeddings, isInitialized]);
+ const sourceMethods =
+ methodsForSelectedModel.length > 0
+ ? methodsForSelectedModel
+ : geometryLayouts.map((layout) => layout.method);
- // Handle resize
- useEffect(() => {
- if (!containerRef.current || !scatterplotRef.current) return;
-
- const resizeObserver = new ResizeObserver((entries) => {
- for (const entry of entries) {
- const { width, height } = entry.contentRect;
- if (width > 0 && height > 0 && scatterplotRef.current) {
- scatterplotRef.current.set({ width, height });
- }
+ return Array.from(new Set(sourceMethods)).sort();
+ }, [geometryLayouts, selectedSpaceKey]);
+
+ const handleModelChange = useCallback(
+ (nextSpaceKey: string) => {
+ if (!nextSpaceKey || geometryLayouts.length === 0) return;
+
+ const targetLayout =
+ geometryLayouts.find(
+ (layout) =>
+ layout.space_key === nextSpaceKey && layout.method === selectedProjectionMethod
+ ) ?? geometryLayouts.find((layout) => layout.space_key === nextSpaceKey);
+
+ if (targetLayout) {
+ setLocalLayoutKey(targetLayout.layout_key);
}
- });
+ },
+ [geometryLayouts, selectedProjectionMethod]
+ );
- resizeObserver.observe(containerRef.current);
- return () => resizeObserver.disconnect();
- }, [isInitialized]);
+ const handleProjectionMethodChange = useCallback(
+ (nextMethod: string) => {
+ if (!nextMethod || geometryLayouts.length === 0) return;
- // Get unique labels for legend
- const uniqueLabels = embeddings
- ? [...new Set(embeddings.labels.map((l) => l || "undefined"))]
- : [];
+ const targetLayout =
+ geometryLayouts.find(
+ (layout) =>
+ layout.method === nextMethod &&
+ layout.space_key === selectedSpaceKey
+ ) ?? geometryLayouts.find((layout) => layout.method === nextMethod);
- return (
-
- {/* Header */}
-
-
-
Embeddings
-
- {/* View mode toggle */}
-
-
setViewMode("euclidean")}
- className={`px-3 py-1 text-xs transition-colors ${
- viewMode === "euclidean"
- ? "bg-primary text-white"
- : "bg-surface hover:bg-surface-light text-text-muted"
- }`}
- >
- Euclidean
-
-
setViewMode("hyperbolic")}
- className={`px-3 py-1 text-xs transition-colors ${
- viewMode === "hyperbolic"
- ? "bg-primary text-white"
- : "bg-surface hover:bg-surface-light text-text-muted"
- }`}
+ if (targetLayout) {
+ setLocalLayoutKey(targetLayout.layout_key);
+ }
+ },
+ [geometryLayouts, selectedSpaceKey]
+ );
+
+ const contextItems = useMemo(() => {
+ return [
+ {
+ id: "model",
+ kind: "select",
+ label: "Model",
+ showLabel: false,
+ value: selectedSpaceKey,
+ placeholder: "Select model",
+ valueTitle: selectedModelLabel,
+ valueClassName: "max-w-[340px]",
+ options: modelOptions,
+ onValueChange: handleModelChange,
+ disabled: modelOptions.length === 0,
+ },
+ ];
+ }, [handleModelChange, modelOptions, selectedModelLabel, selectedSpaceKey]);
+
+ const projectionSettings = useMemo(
+ () => (
+
+
+
+
+
+
+
+
+ Projection method
+
+
+ {projectionMethodOptions.length > 0 ? (
+
- Hyperbolic
-
-
-
+ {projectionMethodOptions.map((method) => (
+
+ {method}
+
+ ))}
+
+ ) : (
+
+ No projection methods available
+
+ )}
+
+
+ ),
+ [
+ handleProjectionMethodChange,
+ projectionMethodOptions,
+ selectedProjectionMethod,
+ ]
+ );
-
- {embeddings ? `${embeddings.ids.length} points` : "Loading..."}
-
-
+ const embeddings = resolvedLayoutKey ? embeddingsByLayoutKey[resolvedLayoutKey] ?? null : null;
+
+ useEffect(() => {
+ if (!resolvedLayoutKey) return;
+ setActiveLayoutKey(resolvedLayoutKey);
+ }, [resolvedLayoutKey, setActiveLayoutKey]);
+
+ useEffect(() => {
+ if (!resolvedLayoutKey) return;
+ if (embeddingsByLayoutKey[resolvedLayoutKey]) return;
+
+ let cancelled = false;
+
+ fetchEmbeddings(resolvedLayoutKey)
+ .then((data) => {
+ if (cancelled) return;
+ setEmbeddingsForLayout(resolvedLayoutKey, data);
+ })
+ .catch((err) => {
+ if (cancelled) return;
+ console.error("Failed to load embeddings:", err);
+ })
+
+ return () => {
+ cancelled = true;
+ };
+ }, [embeddingsByLayoutKey, resolvedLayoutKey, setEmbeddingsForLayout]);
+
+ const { labelsInfo } = useLabelLegend({ datasetInfo, embeddings, labelFilter });
- {/* Main content area */}
-
+ const {
+ canvasRef,
+ overlayCanvasRef,
+ containerRef,
+ handlePointerDown,
+ handlePointerMove,
+ handlePointerUp,
+ handlePointerLeave,
+ handleDoubleClick,
+ rendererError,
+ } = useHyperScatter({
+ embeddings,
+ labelsInfo,
+ selectedIds,
+ hoveredId,
+ setSelectedIds,
+ beginLassoSelection,
+ setHoveredId,
+ hoverEnabled: !labelFilter,
+ });
+
+ const focusLayout = useCallback(() => {
+ if (!resolvedLayoutKey) return;
+ setActiveLayoutKey(resolvedLayoutKey);
+ }, [resolvedLayoutKey, setActiveLayoutKey]);
+
+ const loadingLabel = resolvedLayoutKey
+ ? "Loading embeddings..."
+ : "No embeddings layout available";
+
+ return (
+
+
+
+ {/* Main content area - min-h-0 prevents flex overflow */}
+
{/* Canvas container */}
-
+
{
+ focusLayout();
+ handlePointerDown(e);
+ }}
+ onPointerMove={handlePointerMove}
+ onPointerUp={handlePointerUp}
+ onPointerCancel={handlePointerUp}
+ onPointerLeave={handlePointerLeave}
+ onDoubleClick={handleDoubleClick}
+ onPointerEnter={focusLayout}
/>
- {/* Poincaré disk boundary for hyperbolic mode */}
- {viewMode === "hyperbolic" && (
-
-
- {/* Main Boundary Circle - scaled to match data (max r ≈ 0.9) */}
-
-
- {/* Grid Circles - adjusted for 0.65 hyperbolic scaling factor */}
- {/* After scaling: d=1 => r≈0.316, d=2 => r≈0.569, d=3 => r≈0.748 */}
-
-
-
-
- {/* Radial Lines - scaled to boundary */}
-
-
- {/* Diagonals */}
-
-
-
-
- )}
+ {/* Lasso overlay (screen-space) */}
+
{/* Loading overlay */}
- {!embeddings && (
-
-
Loading embeddings...
+ {rendererError ? (
+
+
+
Browser not supported
+
{rendererError}
+
+ ) : (
+ !embeddings && (
+
+ )
)}
- {/* Legend */}
- {uniqueLabels.length > 0 && (
-
-
Labels
-
- {uniqueLabels.slice(0, 20).map((label) => (
-
- ))}
- {uniqueLabels.length > 20 && (
-
- +{uniqueLabels.length - 20} more
-
- )}
-
-
- )}
-
-
- {/* Instructions */}
-
-
- Shift+drag to lasso select • Scroll to zoom • Drag to pan
-
-
+
);
}
diff --git a/frontend/src/components/VideoGrid.tsx b/frontend/src/components/VideoGrid.tsx
new file mode 100644
index 0000000..261e1b8
--- /dev/null
+++ b/frontend/src/components/VideoGrid.tsx
@@ -0,0 +1,383 @@
+"use client";
+
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import { useVirtualizer } from "@tanstack/react-virtual";
+import justifiedLayout from "justified-layout";
+import { Play } from "lucide-react";
+import { useStore } from "@/store/useStore";
+import { getVideoUrl } from "@/lib/api";
+import { Panel } from "./Panel";
+import { CheckIcon } from "./icons";
+import type { Sample } from "@/types";
+
+interface VideoGridProps {
+ samples: Sample[];
+ onLoadMore?: () => void;
+ hasMore?: boolean;
+}
+
+const BOX_SPACING = 2;
+const TARGET_ROW_HEIGHT = 180;
+const DEFAULT_ASPECT_RATIO = 1;
+
+function getAspectRatio(sample: Sample): number {
+ if (sample.width && sample.height && sample.height > 0) {
+ return sample.width / sample.height;
+ }
+ return DEFAULT_ASPECT_RATIO;
+}
+
+function computeLayout(
+ samples: Sample[],
+ containerWidth: number
+): {
+ boxes: Array<{ width: number; height: number; top: number; left: number }>;
+ containerHeight: number;
+} {
+ if (samples.length === 0 || containerWidth <= 0) {
+ return { boxes: [], containerHeight: 0 };
+ }
+
+ const aspectRatios = samples.map(getAspectRatio);
+
+ const geometry = justifiedLayout(aspectRatios, {
+ containerWidth,
+ containerPadding: 0,
+ boxSpacing: BOX_SPACING,
+ targetRowHeight: TARGET_ROW_HEIGHT,
+ targetRowHeightTolerance: 0.25,
+ showWidows: true,
+ });
+
+ return {
+ boxes: geometry.boxes,
+ containerHeight: geometry.containerHeight,
+ };
+}
+
+interface RowData {
+ startIndex: number;
+ endIndex: number;
+ top: number;
+ height: number;
+}
+
+function groupIntoRows(
+ boxes: Array<{ width: number; height: number; top: number; left: number }>
+): RowData[] {
+ if (boxes.length === 0) return [];
+
+ const rows: RowData[] = [];
+ let currentRowTop = boxes[0].top;
+ let currentRowStart = 0;
+ let currentRowHeight = boxes[0].height;
+
+ for (let i = 1; i < boxes.length; i++) {
+ const box = boxes[i];
+ if (Math.abs(box.top - currentRowTop) > 1) {
+ rows.push({
+ startIndex: currentRowStart,
+ endIndex: i,
+ top: currentRowTop,
+ height: currentRowHeight,
+ });
+ currentRowStart = i;
+ currentRowTop = box.top;
+ currentRowHeight = box.height;
+ } else {
+ currentRowHeight = Math.max(currentRowHeight, box.height);
+ }
+ }
+
+ rows.push({
+ startIndex: currentRowStart,
+ endIndex: boxes.length,
+ top: currentRowTop,
+ height: currentRowHeight,
+ });
+
+ return rows;
+}
+
+function hasVideo(sample: Sample): boolean {
+ const metadata = sample.metadata as Record
;
+ const videoPath = metadata.video_path;
+ const clipLocation = metadata.clip_location;
+
+ if (typeof videoPath === "string" && videoPath.trim()) return true;
+ if (typeof clipLocation === "string" && clipLocation.trim()) return true;
+ return sample.filepath.toLowerCase().endsWith(".mp4");
+}
+
+function metadataScore(sample: Sample, key: string): string {
+ const metadata = sample.metadata as Record;
+ const value = metadata[key];
+ if (typeof value !== "number" || Number.isNaN(value)) return "—";
+ return value.toFixed(1);
+}
+
+function captionSnippet(sample: Sample): string | null {
+ const metadata = sample.metadata as Record;
+ for (const key of ["caption_answer", "first_caption", "caption_raw"]) {
+ const value = metadata[key];
+ if (typeof value === "string" && value.trim()) {
+ return value.trim();
+ }
+ }
+ return null;
+}
+
+export function VideoGrid({ samples, onLoadMore, hasMore }: VideoGridProps) {
+ const containerRef = useRef(null);
+ const [containerWidth, setContainerWidth] = useState(0);
+
+ const {
+ selectedIds,
+ isLassoSelection,
+ selectionSource,
+ toggleSelection,
+ addToSelection,
+ setHoveredId,
+ hoveredId,
+ labelFilter,
+ activeVideoId,
+ setActiveVideoId,
+ setVideoDuration,
+ previewVideoId,
+ setPreviewVideoId,
+ } = useStore();
+
+ useEffect(() => {
+ const container = containerRef.current;
+ if (!container) return;
+
+ const updateWidth = () => {
+ const width = container.clientWidth;
+ if (width > 0 && width !== containerWidth) {
+ setContainerWidth(width);
+ }
+ };
+
+ updateWidth();
+
+ const resizeObserver = new ResizeObserver(() => {
+ requestAnimationFrame(updateWidth);
+ });
+ resizeObserver.observe(container);
+
+ return () => resizeObserver.disconnect();
+ }, [containerWidth]);
+
+ const { boxes, containerHeight } = useMemo(
+ () => computeLayout(samples, containerWidth),
+ [samples, containerWidth]
+ );
+
+ const rows = useMemo(() => groupIntoRows(boxes), [boxes]);
+
+ const virtualizer = useVirtualizer({
+ count: rows.length,
+ getScrollElement: () => containerRef.current,
+ estimateSize: (index) => rows[index]?.height ?? TARGET_ROW_HEIGHT,
+ overscan: 3,
+ getItemKey: (index) => {
+ const row = rows[index];
+ if (!row) return `row-${index}`;
+ const rowSamples = samples.slice(row.startIndex, row.endIndex);
+ return rowSamples.map((s) => s.id).join("-") || `row-${index}`;
+ },
+ });
+
+ useEffect(() => {
+ const container = containerRef.current;
+ if (!container || !onLoadMore || !hasMore) return;
+
+ const handleScroll = () => {
+ const { scrollTop, scrollHeight, clientHeight } = container;
+ if (scrollHeight - scrollTop - clientHeight < 500) {
+ onLoadMore();
+ }
+ };
+
+ container.addEventListener("scroll", handleScroll);
+ return () => container.removeEventListener("scroll", handleScroll);
+ }, [onLoadMore, hasMore]);
+
+ useEffect(() => {
+ containerRef.current?.scrollTo({ top: 0 });
+ }, [labelFilter]);
+
+ useEffect(() => {
+ if (isLassoSelection) return;
+ if (selectionSource !== "scatter") return;
+ if (selectedIds.size === 0) return;
+
+ try {
+ virtualizer.scrollToIndex(0, { align: "start" });
+ } catch {
+ containerRef.current?.scrollTo({ top: 0 });
+ }
+ }, [isLassoSelection, selectedIds, selectionSource, virtualizer]);
+
+ const handleClick = useCallback(
+ (sample: Sample, event: React.MouseEvent) => {
+ setActiveVideoId(sample.id);
+
+ if (event.metaKey || event.ctrlKey) {
+ toggleSelection(sample.id);
+ } else if (event.shiftKey && selectedIds.size > 0) {
+ const selectedArray = Array.from(selectedIds);
+ const lastSelected = selectedArray[selectedArray.length - 1];
+ const lastIndex = samples.findIndex((s) => s.id === lastSelected);
+ const currentIndex = samples.findIndex((s) => s.id === sample.id);
+
+ if (lastIndex !== -1 && currentIndex !== -1) {
+ const start = Math.min(lastIndex, currentIndex);
+ const end = Math.max(lastIndex, currentIndex);
+ const rangeIds = samples.slice(start, end + 1).map((s) => s.id);
+ addToSelection(rangeIds);
+ }
+ } else {
+ const newSet = new Set();
+ newSet.add(sample.id);
+ useStore.getState().setSelectedIds(newSet, "grid");
+ }
+ },
+ [samples, selectedIds, toggleSelection, addToSelection, setActiveVideoId]
+ );
+
+ const virtualRows = virtualizer.getVirtualItems();
+
+ return (
+
+
+
+
+ {virtualRows.map((virtualRow) => {
+ const row = rows[virtualRow.index];
+ if (!row) return null;
+
+ const rowSamples = samples.slice(row.startIndex, row.endIndex);
+ const rowBoxes = boxes.slice(row.startIndex, row.endIndex);
+
+ return (
+
+ {rowSamples.map((sample, i) => {
+ const box = rowBoxes[i];
+ if (!box) return null;
+
+ const isSelected = isLassoSelection ? true : selectedIds.has(sample.id);
+ const isHovered = hoveredId === sample.id;
+ const isVideo = hasVideo(sample);
+ const showPreview = isVideo && previewVideoId === sample.id;
+ const isActiveVideo = activeVideoId === sample.id;
+ const caption = captionSnippet(sample);
+
+ return (
+
handleClick(sample, e)}
+ onMouseEnter={() => {
+ setHoveredId(sample.id);
+ setPreviewVideoId(isVideo ? sample.id : null);
+ }}
+ onMouseLeave={() => {
+ setHoveredId(null);
+ if (previewVideoId === sample.id) {
+ setPreviewVideoId(null);
+ }
+ }}
+ >
+ {showPreview ? (
+
{
+ setVideoDuration(sample.id, event.currentTarget.duration || 0);
+ }}
+ />
+ ) : sample.thumbnail ? (
+ // eslint-disable-next-line @next/next/no-img-element
+
+ ) : (
+
+ No preview
+
+ )}
+
+ {isVideo && (
+
+ )}
+
+
+ A {metadataScore(sample, "aesthetic_score")} · M {metadataScore(sample, "motion_score")}
+
+
+ {caption && (
+
+
+ {caption}
+
+
+ )}
+
+ {isSelected && (
+
+
+
+ )}
+
+ );
+ })}
+
+ );
+ })}
+
+
+
+
+ );
+}
diff --git a/frontend/src/components/VideoPanel.tsx b/frontend/src/components/VideoPanel.tsx
new file mode 100644
index 0000000..bae5717
--- /dev/null
+++ b/frontend/src/components/VideoPanel.tsx
@@ -0,0 +1,194 @@
+"use client";
+
+import { useEffect, useMemo, useRef, useState } from "react";
+import { Panel } from "./Panel";
+import { fetchAnnotations, getVideoUrl } from "@/lib/api";
+import { useStore } from "@/store/useStore";
+import type { VideoAnnotation } from "@/types";
+
+interface VideoPanelProps {
+ className?: string;
+ sampleId?: string;
+}
+
+function formatScore(value: number | null): string {
+ if (typeof value !== "number" || Number.isNaN(value)) return "—";
+ return value.toFixed(2);
+}
+
+export function VideoPanel({ className = "", sampleId }: VideoPanelProps) {
+ const {
+ selectedIds,
+ activeVideoId,
+ setActiveVideoId,
+ globalSeekTime,
+ setGlobalSeekTime,
+ isTimelinePlaying,
+ setTimelinePlaying,
+ setVideoDuration,
+ annotationCache,
+ cacheAnnotation,
+ } = useStore();
+
+ const videoRef = useRef(null);
+
+ const activeSampleId = useMemo(() => {
+ if (sampleId) return sampleId;
+ if (activeVideoId) return activeVideoId;
+ if (selectedIds.size === 0) return null;
+ return Array.from(selectedIds)[0] ?? null;
+ }, [activeVideoId, sampleId, selectedIds]);
+
+ const [annotation, setAnnotation] = useState(null);
+ const [isLoading, setIsLoading] = useState(false);
+ const [error, setError] = useState(null);
+ const [videoFailed, setVideoFailed] = useState(false);
+
+ useEffect(() => {
+ setVideoFailed(false);
+ }, [activeSampleId]);
+
+ useEffect(() => {
+ if (!activeSampleId) return;
+ if (activeVideoId === activeSampleId) return;
+ setActiveVideoId(activeSampleId);
+ }, [activeSampleId, activeVideoId, setActiveVideoId]);
+
+ useEffect(() => {
+ if (!activeSampleId) {
+ setAnnotation(null);
+ setError(null);
+ setTimelinePlaying(false);
+ return;
+ }
+
+ const cached = annotationCache[activeSampleId];
+ if (cached) {
+ setAnnotation(cached);
+ setError(null);
+ setIsLoading(false);
+ return;
+ }
+
+ let cancelled = false;
+ setIsLoading(true);
+ setError(null);
+
+ fetchAnnotations(activeSampleId)
+ .then((data) => {
+ if (cancelled) return;
+ setAnnotation(data);
+ cacheAnnotation(activeSampleId, data);
+ })
+ .catch((err) => {
+ if (cancelled) return;
+ setAnnotation(null);
+ setError(err instanceof Error ? err.message : "Failed to load annotations");
+ })
+ .finally(() => {
+ if (cancelled) return;
+ setIsLoading(false);
+ });
+
+ return () => {
+ cancelled = true;
+ };
+ }, [activeSampleId, annotationCache, cacheAnnotation, setTimelinePlaying]);
+
+ useEffect(() => {
+ const video = videoRef.current;
+ if (!video) return;
+
+ const delta = Math.abs(video.currentTime - globalSeekTime);
+ if (!isTimelinePlaying && delta > 0.08) {
+ video.currentTime = globalSeekTime;
+ }
+
+ if (isTimelinePlaying) {
+ void video.play();
+ } else {
+ video.pause();
+ }
+ }, [activeSampleId, globalSeekTime, isTimelinePlaying]);
+
+ if (!activeSampleId) {
+ return (
+
+
+ Select a clip preview in the grid or scatter plot to open its video.
+
+
+ );
+ }
+
+ return (
+
+
+ {!videoFailed ? (
+
{
+ const duration = event.currentTarget.duration || 0;
+ setVideoDuration(activeSampleId, duration);
+ if (globalSeekTime > duration && duration > 0) {
+ setGlobalSeekTime(duration);
+ }
+ }}
+ onTimeUpdate={(event) => {
+ if (!isTimelinePlaying) return;
+ setGlobalSeekTime(event.currentTarget.currentTime);
+ }}
+ onSeeked={(event) => {
+ setGlobalSeekTime(event.currentTarget.currentTime);
+ }}
+ onEnded={() => setTimelinePlaying(false)}
+ onError={() => {
+ setVideoFailed(true);
+ setTimelinePlaying(false);
+ }}
+ />
+ ) : (
+
+ Video failed to load for this clip.
+
+ )}
+
+
+
+
+ aesthetic: {formatScore(annotation?.aesthetic_score ?? null)}
+ motion: {formatScore(annotation?.motion_score ?? null)}
+
+ dedup: {annotation?.dedup_status ?? "unknown"}
+
+
+ sim: {formatScore(annotation?.cosine_sim_score ?? null)}
+
+
+
+ {isLoading ? (
+
Loading annotations…
+ ) : error ? (
+
{error}
+ ) : (
+ <>
+
+ {annotation?.caption ?? annotation?.raw_caption ?? "No caption available."}
+
+ {annotation?.reasoning ? (
+
+ Reasoning
+ {annotation.reasoning}
+
+ ) : null}
+ >
+ )}
+
+
+ );
+}
diff --git a/frontend/src/components/icons.tsx b/frontend/src/components/icons.tsx
new file mode 100644
index 0000000..46494f7
--- /dev/null
+++ b/frontend/src/components/icons.tsx
@@ -0,0 +1,68 @@
+"use client";
+
+/**
+ * Shared icons for HyperView UI.
+ * Using inline SVGs for simplicity (no extra icon library dependency).
+ */
+
+export const GridIcon = () => (
+
+
+
+
+
+
+);
+
+export const ScatterIcon = () => (
+
+
+
+
+
+
+
+);
+
+export const HyperViewLogo = ({ className = "w-5 h-5" }: { className?: string }) => (
+
+
+
+
+
+);
+
+export const CheckIcon = () => (
+
+
+
+);
+
+/** Euclidean geometry icon - flat grid */
+export const EuclideanIcon = () => (
+
+
+
+
+
+);
+
+/** Poincaré disk icon - hyperbolic geometry */
+export const PoincareIcon = () => (
+
+
+
+
+
+);
+
+/** Spherical geometry icon - for future use */
+export const SphericalIcon = () => (
+
+
+
+
+
+);
+
+
diff --git a/frontend/src/components/index.ts b/frontend/src/components/index.ts
index 1955b11..370b265 100644
--- a/frontend/src/components/index.ts
+++ b/frontend/src/components/index.ts
@@ -1,3 +1,15 @@
export { ImageGrid } from "./ImageGrid";
+export { VideoGrid } from "./VideoGrid";
export { ScatterPanel } from "./ScatterPanel";
+export { DockviewWorkspace } from "./DockviewWorkspace";
export { Header } from "./Header";
+export { GlobalSeekBar } from "./GlobalSeekBar";
+export { Panel, PanelFooter } from "./Panel";
+export { PanelHeader } from "./PanelHeader";
+export { PanelTitle } from "./PanelTitle";
+export { PanelContextBar } from "./PanelContextBar";
+export { ExplorerPanel } from "./ExplorerPanel";
+export { PlaceholderPanel } from "./PlaceholderPanel";
+export { VideoPanel } from "./VideoPanel";
+export { CurationPanel } from "./CurationPanel";
+export * from "./icons";
diff --git a/frontend/src/components/ui/button.tsx b/frontend/src/components/ui/button.tsx
new file mode 100644
index 0000000..65d4fcd
--- /dev/null
+++ b/frontend/src/components/ui/button.tsx
@@ -0,0 +1,57 @@
+import * as React from "react"
+import { Slot } from "@radix-ui/react-slot"
+import { cva, type VariantProps } from "class-variance-authority"
+
+import { cn } from "@/lib/utils"
+
+const buttonVariants = cva(
+ "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
+ {
+ variants: {
+ variant: {
+ default:
+ "bg-primary text-primary-foreground shadow hover:bg-primary/90",
+ destructive:
+ "bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90",
+ outline:
+ "border border-input bg-background shadow-sm hover:bg-accent hover:text-accent-foreground",
+ secondary:
+ "bg-secondary text-secondary-foreground shadow-sm hover:bg-secondary/80",
+ ghost: "hover:bg-accent hover:text-accent-foreground",
+ link: "text-primary underline-offset-4 hover:underline",
+ },
+ size: {
+ default: "h-9 px-4 py-2",
+ sm: "h-8 rounded-md px-3 text-xs",
+ lg: "h-10 rounded-md px-8",
+ icon: "h-9 w-9",
+ },
+ },
+ defaultVariants: {
+ variant: "default",
+ size: "default",
+ },
+ }
+)
+
+export interface ButtonProps
+ extends React.ButtonHTMLAttributes,
+ VariantProps {
+ asChild?: boolean
+}
+
+const Button = React.forwardRef(
+ ({ className, variant, size, asChild = false, ...props }, ref) => {
+ const Comp = asChild ? Slot : "button"
+ return (
+
+ )
+ }
+)
+Button.displayName = "Button"
+
+export { Button, buttonVariants }
diff --git a/frontend/src/components/ui/collapsible.tsx b/frontend/src/components/ui/collapsible.tsx
new file mode 100644
index 0000000..9fa4894
--- /dev/null
+++ b/frontend/src/components/ui/collapsible.tsx
@@ -0,0 +1,11 @@
+"use client"
+
+import * as CollapsiblePrimitive from "@radix-ui/react-collapsible"
+
+const Collapsible = CollapsiblePrimitive.Root
+
+const CollapsibleTrigger = CollapsiblePrimitive.CollapsibleTrigger
+
+const CollapsibleContent = CollapsiblePrimitive.CollapsibleContent
+
+export { Collapsible, CollapsibleTrigger, CollapsibleContent }
diff --git a/frontend/src/components/ui/command.tsx b/frontend/src/components/ui/command.tsx
new file mode 100644
index 0000000..2cecd91
--- /dev/null
+++ b/frontend/src/components/ui/command.tsx
@@ -0,0 +1,153 @@
+"use client"
+
+import * as React from "react"
+import { type DialogProps } from "@radix-ui/react-dialog"
+import { Command as CommandPrimitive } from "cmdk"
+import { Search } from "lucide-react"
+
+import { cn } from "@/lib/utils"
+import { Dialog, DialogContent } from "@/components/ui/dialog"
+
+const Command = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+Command.displayName = CommandPrimitive.displayName
+
+const CommandDialog = ({ children, ...props }: DialogProps) => {
+ return (
+
+
+
+ {children}
+
+
+
+ )
+}
+
+const CommandInput = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+
+
+
+))
+
+CommandInput.displayName = CommandPrimitive.Input.displayName
+
+const CommandList = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+
+CommandList.displayName = CommandPrimitive.List.displayName
+
+const CommandEmpty = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>((props, ref) => (
+
+))
+
+CommandEmpty.displayName = CommandPrimitive.Empty.displayName
+
+const CommandGroup = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+
+CommandGroup.displayName = CommandPrimitive.Group.displayName
+
+const CommandSeparator = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+CommandSeparator.displayName = CommandPrimitive.Separator.displayName
+
+const CommandItem = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+
+CommandItem.displayName = CommandPrimitive.Item.displayName
+
+const CommandShortcut = ({
+ className,
+ ...props
+}: React.HTMLAttributes) => {
+ return (
+
+ )
+}
+CommandShortcut.displayName = "CommandShortcut"
+
+export {
+ Command,
+ CommandDialog,
+ CommandInput,
+ CommandList,
+ CommandEmpty,
+ CommandGroup,
+ CommandItem,
+ CommandShortcut,
+ CommandSeparator,
+}
diff --git a/frontend/src/components/ui/dialog.tsx b/frontend/src/components/ui/dialog.tsx
new file mode 100644
index 0000000..1647513
--- /dev/null
+++ b/frontend/src/components/ui/dialog.tsx
@@ -0,0 +1,122 @@
+"use client"
+
+import * as React from "react"
+import * as DialogPrimitive from "@radix-ui/react-dialog"
+import { X } from "lucide-react"
+
+import { cn } from "@/lib/utils"
+
+const Dialog = DialogPrimitive.Root
+
+const DialogTrigger = DialogPrimitive.Trigger
+
+const DialogPortal = DialogPrimitive.Portal
+
+const DialogClose = DialogPrimitive.Close
+
+const DialogOverlay = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+DialogOverlay.displayName = DialogPrimitive.Overlay.displayName
+
+const DialogContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, children, ...props }, ref) => (
+
+
+
+ {children}
+
+
+ Close
+
+
+
+))
+DialogContent.displayName = DialogPrimitive.Content.displayName
+
+const DialogHeader = ({
+ className,
+ ...props
+}: React.HTMLAttributes) => (
+
+)
+DialogHeader.displayName = "DialogHeader"
+
+const DialogFooter = ({
+ className,
+ ...props
+}: React.HTMLAttributes) => (
+
+)
+DialogFooter.displayName = "DialogFooter"
+
+const DialogTitle = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+DialogTitle.displayName = DialogPrimitive.Title.displayName
+
+const DialogDescription = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+DialogDescription.displayName = DialogPrimitive.Description.displayName
+
+export {
+ Dialog,
+ DialogPortal,
+ DialogOverlay,
+ DialogTrigger,
+ DialogClose,
+ DialogContent,
+ DialogHeader,
+ DialogFooter,
+ DialogTitle,
+ DialogDescription,
+}
diff --git a/frontend/src/components/ui/dropdown-menu.tsx b/frontend/src/components/ui/dropdown-menu.tsx
new file mode 100644
index 0000000..cd9a158
--- /dev/null
+++ b/frontend/src/components/ui/dropdown-menu.tsx
@@ -0,0 +1,201 @@
+"use client"
+
+import * as React from "react"
+import * as DropdownMenuPrimitive from "@radix-ui/react-dropdown-menu"
+import { Check, ChevronRight, Circle } from "lucide-react"
+
+import { cn } from "@/lib/utils"
+
+const DropdownMenu = DropdownMenuPrimitive.Root
+
+const DropdownMenuTrigger = DropdownMenuPrimitive.Trigger
+
+const DropdownMenuGroup = DropdownMenuPrimitive.Group
+
+const DropdownMenuPortal = DropdownMenuPrimitive.Portal
+
+const DropdownMenuSub = DropdownMenuPrimitive.Sub
+
+const DropdownMenuRadioGroup = DropdownMenuPrimitive.RadioGroup
+
+const DropdownMenuSubTrigger = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef & {
+ inset?: boolean
+ }
+>(({ className, inset, children, ...props }, ref) => (
+
+ {children}
+
+
+))
+DropdownMenuSubTrigger.displayName =
+ DropdownMenuPrimitive.SubTrigger.displayName
+
+const DropdownMenuSubContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+DropdownMenuSubContent.displayName =
+ DropdownMenuPrimitive.SubContent.displayName
+
+const DropdownMenuContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, sideOffset = 3, ...props }, ref) => (
+
+
+
+))
+DropdownMenuContent.displayName = DropdownMenuPrimitive.Content.displayName
+
+const DropdownMenuItem = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef & {
+ inset?: boolean
+ }
+>(({ className, inset, ...props }, ref) => (
+ svg]:size-3.5 [&>svg]:shrink-0",
+ inset && "pl-8",
+ className
+ )}
+ {...props}
+ />
+))
+DropdownMenuItem.displayName = DropdownMenuPrimitive.Item.displayName
+
+const DropdownMenuCheckboxItem = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, children, checked, ...props }, ref) => (
+
+
+
+
+
+
+ {children}
+
+))
+DropdownMenuCheckboxItem.displayName =
+ DropdownMenuPrimitive.CheckboxItem.displayName
+
+const DropdownMenuRadioItem = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, children, ...props }, ref) => (
+
+
+
+
+
+
+ {children}
+
+))
+DropdownMenuRadioItem.displayName = DropdownMenuPrimitive.RadioItem.displayName
+
+const DropdownMenuLabel = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef & {
+ inset?: boolean
+ }
+>(({ className, inset, ...props }, ref) => (
+
+))
+DropdownMenuLabel.displayName = DropdownMenuPrimitive.Label.displayName
+
+const DropdownMenuSeparator = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+DropdownMenuSeparator.displayName = DropdownMenuPrimitive.Separator.displayName
+
+const DropdownMenuShortcut = ({
+ className,
+ ...props
+}: React.HTMLAttributes) => {
+ return (
+
+ )
+}
+DropdownMenuShortcut.displayName = "DropdownMenuShortcut"
+
+export {
+ DropdownMenu,
+ DropdownMenuTrigger,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuCheckboxItem,
+ DropdownMenuRadioItem,
+ DropdownMenuLabel,
+ DropdownMenuSeparator,
+ DropdownMenuShortcut,
+ DropdownMenuGroup,
+ DropdownMenuPortal,
+ DropdownMenuSub,
+ DropdownMenuSubContent,
+ DropdownMenuSubTrigger,
+ DropdownMenuRadioGroup,
+}
diff --git a/frontend/src/components/ui/popover.tsx b/frontend/src/components/ui/popover.tsx
new file mode 100644
index 0000000..70a28f6
--- /dev/null
+++ b/frontend/src/components/ui/popover.tsx
@@ -0,0 +1,33 @@
+"use client"
+
+import * as React from "react"
+import * as PopoverPrimitive from "@radix-ui/react-popover"
+
+import { cn } from "@/lib/utils"
+
+const Popover = PopoverPrimitive.Root
+
+const PopoverTrigger = PopoverPrimitive.Trigger
+
+const PopoverAnchor = PopoverPrimitive.Anchor
+
+const PopoverContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, align = "center", sideOffset = 4, ...props }, ref) => (
+
+
+
+))
+PopoverContent.displayName = PopoverPrimitive.Content.displayName
+
+export { Popover, PopoverTrigger, PopoverContent, PopoverAnchor }
diff --git a/frontend/src/components/ui/scroll-area.tsx b/frontend/src/components/ui/scroll-area.tsx
new file mode 100644
index 0000000..b1e223f
--- /dev/null
+++ b/frontend/src/components/ui/scroll-area.tsx
@@ -0,0 +1,49 @@
+"use client"
+
+import * as React from "react"
+import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area"
+
+import { cn } from "@/lib/utils"
+
+const ScrollArea = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, children, ...props }, ref) => (
+
+
+ {children}
+
+
+
+
+))
+ScrollArea.displayName = ScrollAreaPrimitive.Root.displayName
+
+const ScrollBar = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, orientation = "vertical", ...props }, ref) => (
+
+
+
+))
+ScrollBar.displayName = ScrollAreaPrimitive.ScrollAreaScrollbar.displayName
+
+export { ScrollArea, ScrollBar }
diff --git a/frontend/src/components/ui/separator.tsx b/frontend/src/components/ui/separator.tsx
new file mode 100644
index 0000000..12d81c4
--- /dev/null
+++ b/frontend/src/components/ui/separator.tsx
@@ -0,0 +1,31 @@
+"use client"
+
+import * as React from "react"
+import * as SeparatorPrimitive from "@radix-ui/react-separator"
+
+import { cn } from "@/lib/utils"
+
+const Separator = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(
+ (
+ { className, orientation = "horizontal", decorative = true, ...props },
+ ref
+ ) => (
+
+ )
+)
+Separator.displayName = SeparatorPrimitive.Root.displayName
+
+export { Separator }
diff --git a/frontend/src/components/ui/toggle-group.tsx b/frontend/src/components/ui/toggle-group.tsx
new file mode 100644
index 0000000..1c876bb
--- /dev/null
+++ b/frontend/src/components/ui/toggle-group.tsx
@@ -0,0 +1,61 @@
+"use client"
+
+import * as React from "react"
+import * as ToggleGroupPrimitive from "@radix-ui/react-toggle-group"
+import { type VariantProps } from "class-variance-authority"
+
+import { cn } from "@/lib/utils"
+import { toggleVariants } from "@/components/ui/toggle"
+
+const ToggleGroupContext = React.createContext<
+ VariantProps
+>({
+ size: "default",
+ variant: "default",
+})
+
+const ToggleGroup = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef &
+ VariantProps
+>(({ className, variant, size, children, ...props }, ref) => (
+
+
+ {children}
+
+
+))
+
+ToggleGroup.displayName = ToggleGroupPrimitive.Root.displayName
+
+const ToggleGroupItem = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef &
+ VariantProps
+>(({ className, children, variant, size, ...props }, ref) => {
+ const context = React.useContext(ToggleGroupContext)
+
+ return (
+
+ {children}
+
+ )
+})
+
+ToggleGroupItem.displayName = ToggleGroupPrimitive.Item.displayName
+
+export { ToggleGroup, ToggleGroupItem }
diff --git a/frontend/src/components/ui/toggle.tsx b/frontend/src/components/ui/toggle.tsx
new file mode 100644
index 0000000..e516f21
--- /dev/null
+++ b/frontend/src/components/ui/toggle.tsx
@@ -0,0 +1,45 @@
+"use client"
+
+import * as React from "react"
+import * as TogglePrimitive from "@radix-ui/react-toggle"
+import { cva, type VariantProps } from "class-variance-authority"
+
+import { cn } from "@/lib/utils"
+
+const toggleVariants = cva(
+ "inline-flex items-center justify-center gap-2 rounded-md text-sm font-medium transition-colors hover:bg-muted hover:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50 data-[state=on]:bg-primary data-[state=on]:text-primary-foreground [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
+ {
+ variants: {
+ variant: {
+ default: "bg-transparent",
+ outline:
+ "border border-input bg-transparent shadow-sm hover:bg-accent hover:text-accent-foreground data-[state=on]:border-primary",
+ },
+ size: {
+ default: "h-9 px-2 min-w-9",
+ sm: "h-8 px-1.5 min-w-8",
+ lg: "h-10 px-2.5 min-w-10",
+ },
+ },
+ defaultVariants: {
+ variant: "default",
+ size: "default",
+ },
+ }
+)
+
+const Toggle = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef &
+ VariantProps
+>(({ className, variant, size, ...props }, ref) => (
+
+))
+
+Toggle.displayName = TogglePrimitive.Root.displayName
+
+export { Toggle, toggleVariants }
diff --git a/frontend/src/components/ui/tooltip.tsx b/frontend/src/components/ui/tooltip.tsx
new file mode 100644
index 0000000..28e1918
--- /dev/null
+++ b/frontend/src/components/ui/tooltip.tsx
@@ -0,0 +1,32 @@
+"use client"
+
+import * as React from "react"
+import * as TooltipPrimitive from "@radix-ui/react-tooltip"
+
+import { cn } from "@/lib/utils"
+
+const TooltipProvider = TooltipPrimitive.Provider
+
+const Tooltip = TooltipPrimitive.Root
+
+const TooltipTrigger = TooltipPrimitive.Trigger
+
+const TooltipContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, sideOffset = 4, ...props }, ref) => (
+
+
+
+))
+TooltipContent.displayName = TooltipPrimitive.Content.displayName
+
+export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
diff --git a/frontend/src/components/useHyperScatter.ts b/frontend/src/components/useHyperScatter.ts
new file mode 100644
index 0000000..3801a70
--- /dev/null
+++ b/frontend/src/components/useHyperScatter.ts
@@ -0,0 +1,614 @@
+import type React from "react";
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+
+import type { EmbeddingsData } from "@/types";
+import type { ScatterLabelsInfo } from "@/lib/labelLegend";
+import type { Dataset, GeometryMode, Modifiers, Renderer } from "hyper-scatter";
+
+type HyperScatterModule = typeof import("hyper-scatter");
+
+const MAX_LASSO_VERTS = 512;
+
+function supportsWebGL2(): boolean {
+ try {
+ if (typeof document === "undefined") return false;
+ const canvas = document.createElement("canvas");
+ return !!canvas.getContext("webgl2");
+ } catch {
+ return false;
+ }
+}
+
+function capInterleavedXY(points: ArrayLike, maxVerts: number): number[] {
+ const n = Math.floor(points.length / 2);
+ if (n <= maxVerts) return Array.from(points as ArrayLike);
+
+ const out = new Array(maxVerts * 2);
+ for (let i = 0; i < maxVerts; i++) {
+ const src = Math.floor((i * n) / maxVerts);
+ out[i * 2] = points[src * 2];
+ out[i * 2 + 1] = points[src * 2 + 1];
+ }
+ return out;
+}
+
+
+interface UseHyperScatterArgs {
+ embeddings: EmbeddingsData | null;
+ labelsInfo: ScatterLabelsInfo | null;
+ selectedIds: Set;
+ hoveredId: string | null;
+ setSelectedIds: (ids: Set, source?: "scatter" | "grid") => void;
+ beginLassoSelection: (query: { layoutKey: string; polygon: number[] }) => void;
+ setHoveredId: (id: string | null) => void;
+ hoverEnabled?: boolean;
+}
+
+function toModifiers(e: { shiftKey: boolean; ctrlKey: boolean; altKey: boolean; metaKey: boolean }): Modifiers {
+ return {
+ shift: e.shiftKey,
+ ctrl: e.ctrlKey,
+ alt: e.altKey,
+ meta: e.metaKey,
+ };
+}
+
+function clearOverlay(canvas: HTMLCanvasElement | null): void {
+ if (!canvas) return;
+ const ctx = canvas.getContext("2d");
+ if (!ctx) return;
+ ctx.setTransform(1, 0, 0, 1, 0, 0);
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+}
+
+function drawLassoOverlay(canvas: HTMLCanvasElement | null, points: number[]): void {
+ if (!canvas) return;
+ const ctx = canvas.getContext("2d");
+ if (!ctx) return;
+
+ clearOverlay(canvas);
+ if (points.length < 6) return;
+
+ ctx.save();
+ ctx.lineWidth = 2;
+ ctx.strokeStyle = "rgba(79,70,229,0.9)"; // indigo-ish
+ ctx.fillStyle = "rgba(79,70,229,0.15)";
+
+ ctx.beginPath();
+ ctx.moveTo(points[0], points[1]);
+ for (let i = 2; i < points.length; i += 2) {
+ ctx.lineTo(points[i], points[i + 1]);
+ }
+ ctx.closePath();
+ ctx.fill();
+ ctx.stroke();
+ ctx.restore();
+}
+
+export function useHyperScatter({
+ embeddings,
+ labelsInfo,
+ selectedIds,
+ hoveredId,
+ setSelectedIds,
+ beginLassoSelection,
+ setHoveredId,
+ hoverEnabled = true,
+}: UseHyperScatterArgs) {
+ const canvasRef = useRef(null);
+ const overlayCanvasRef = useRef(null);
+ const containerRef = useRef(null);
+
+ const rendererRef = useRef(null);
+
+ const [rendererError, setRendererError] = useState(null);
+
+ const rafPendingRef = useRef(false);
+
+ // Interaction state (refs to avoid rerender churn)
+ const isPanningRef = useRef(false);
+ const isLassoingRef = useRef(false);
+ const pointerDownXRef = useRef(0);
+ const pointerDownYRef = useRef(0);
+ const lastPointerXRef = useRef(0);
+ const lastPointerYRef = useRef(0);
+ const lassoPointsRef = useRef([]);
+ const persistentLassoRef = useRef(null);
+
+ const hoveredIndexRef = useRef(-1);
+
+ const idToIndex = useMemo(() => {
+ if (!embeddings) return null;
+ const m = new Map();
+ for (let i = 0; i < embeddings.ids.length; i++) {
+ m.set(embeddings.ids[i], i);
+ }
+ return m;
+ }, [embeddings]);
+
+ const requestRender = useCallback(() => {
+ if (rafPendingRef.current) return;
+ rafPendingRef.current = true;
+
+ requestAnimationFrame(() => {
+ rafPendingRef.current = false;
+ const renderer = rendererRef.current;
+ if (!renderer) return;
+
+ try {
+ renderer.render();
+ } catch (err) {
+ // Avoid an exception storm that would permanently prevent the UI from updating.
+ console.error("hyper-scatter renderer.render() failed:", err);
+ try {
+ renderer.destroy();
+ } catch {
+ // ignore
+ }
+ rendererRef.current = null;
+ setRendererError(
+ "This browser can't render the scatter plot (WebGL2 is required). Please use Chrome/Edge/Firefox."
+ );
+ clearOverlay(overlayCanvasRef.current);
+ return;
+ }
+
+ if (isLassoingRef.current) {
+ drawLassoOverlay(overlayCanvasRef.current, lassoPointsRef.current);
+ }
+ });
+ }, []);
+
+ const getCanvasPos = useCallback((e: { clientX: number; clientY: number }) => {
+ const canvas = canvasRef.current;
+ if (!canvas) return { x: 0, y: 0 };
+ const rect = canvas.getBoundingClientRect();
+ return {
+ x: e.clientX - rect.left,
+ y: e.clientY - rect.top,
+ };
+ }, []);
+
+ const redrawOverlay = useCallback(() => {
+ if (!overlayCanvasRef.current) return;
+ clearOverlay(overlayCanvasRef.current);
+ const persistent = persistentLassoRef.current;
+ if (persistent && persistent.length >= 6) {
+ drawLassoOverlay(overlayCanvasRef.current, persistent);
+ }
+ }, []);
+
+ const clearPersistentLasso = useCallback(() => {
+ persistentLassoRef.current = null;
+ clearOverlay(overlayCanvasRef.current);
+ }, []);
+
+ const stopInteraction = useCallback(() => {
+ isPanningRef.current = false;
+ isLassoingRef.current = false;
+ lassoPointsRef.current = [];
+ if (persistentLassoRef.current) {
+ redrawOverlay();
+ return;
+ }
+ clearOverlay(overlayCanvasRef.current);
+ }, [redrawOverlay]);
+
+ // Initialize renderer when embeddings change.
+ useEffect(() => {
+ if (!embeddings || !labelsInfo) return;
+ if (!canvasRef.current || !containerRef.current) return;
+
+ let cancelled = false;
+
+ const init = async () => {
+ // Clear any previous renderer errors when we attempt to re-init.
+ setRendererError(null);
+
+ if (!supportsWebGL2()) {
+ setRendererError(
+ "This browser doesn't support WebGL2, so the scatter plot can't be displayed. Please use Chrome/Edge/Firefox."
+ );
+ return;
+ }
+
+ try {
+ const viz = (await import("hyper-scatter")) as HyperScatterModule;
+ if (cancelled) return;
+
+ const container = containerRef.current;
+ const canvas = canvasRef.current;
+ if (!container || !canvas) return;
+
+ // Destroy existing renderer (if any)
+ if (rendererRef.current) {
+ rendererRef.current.destroy();
+ rendererRef.current = null;
+ }
+
+ const rect = container.getBoundingClientRect();
+ const width = Math.floor(rect.width);
+ const height = Math.floor(rect.height);
+ if (overlayCanvasRef.current) {
+ overlayCanvasRef.current.width = Math.max(1, width);
+ overlayCanvasRef.current.height = Math.max(1, height);
+ overlayCanvasRef.current.style.width = `${width}px`;
+ overlayCanvasRef.current.style.height = `${height}px`;
+ redrawOverlay();
+ }
+
+ // Use coords from embeddings response directly
+ const coords = embeddings.coords;
+ const positions = new Float32Array(coords.length * 2);
+ for (let i = 0; i < coords.length; i++) {
+ positions[i * 2] = coords[i][0];
+ positions[i * 2 + 1] = coords[i][1];
+ }
+
+ const geometry = embeddings.geometry as GeometryMode;
+ const dataset: Dataset = viz.createDataset(geometry, positions, labelsInfo.categories);
+
+ const opts = {
+ width,
+ height,
+ devicePixelRatio: window.devicePixelRatio,
+ pointRadius: 4,
+ colors: labelsInfo.palette,
+ backgroundColor: "#161b22", // Match HyperView theme: --card is #161b22
+ };
+
+ const renderer: Renderer =
+ geometry === "euclidean" ? new viz.EuclideanWebGLCandidate() : new viz.HyperbolicWebGLCandidate();
+
+ renderer.init(canvas, opts);
+
+ renderer.setDataset(dataset);
+ rendererRef.current = renderer;
+
+ // Force a first render to surface WebGL2 context creation failures early.
+ try {
+ renderer.render();
+ } catch (err) {
+ console.error("hyper-scatter initial render failed:", err);
+ rendererRef.current = null;
+ try {
+ renderer.destroy();
+ } catch {
+ // ignore
+ }
+ setRendererError(
+ "This browser can't render the scatter plot (WebGL2 is required). Please use Chrome/Edge/Firefox."
+ );
+ return;
+ }
+
+ hoveredIndexRef.current = -1;
+ renderer.setHovered(-1);
+
+ requestRender();
+ } catch (err) {
+ console.error("Failed to initialize hyper-scatter renderer:", err);
+ setRendererError(
+ "Failed to initialize the scatter renderer in this browser. Please use Chrome/Edge/Firefox."
+ );
+ }
+ };
+
+ init();
+
+ return () => {
+ cancelled = true;
+ stopInteraction();
+ if (rendererRef.current) {
+ rendererRef.current.destroy();
+ rendererRef.current = null;
+ }
+ };
+ }, [embeddings, labelsInfo, redrawOverlay, requestRender, stopInteraction]);
+
+ // Store -> renderer sync
+ useEffect(() => {
+ const renderer = rendererRef.current;
+ if (!renderer || !embeddings || !idToIndex) return;
+
+ const indices = new Set();
+ for (const id of selectedIds) {
+ const idx = idToIndex.get(id);
+ if (typeof idx === "number") indices.add(idx);
+ }
+
+ renderer.setSelection(indices);
+
+ if (!hoverEnabled) {
+ renderer.setHovered(-1);
+ hoveredIndexRef.current = -1;
+ requestRender();
+ return;
+ }
+
+ const hoveredIdx = hoveredId ? (idToIndex.get(hoveredId) ?? -1) : -1;
+ renderer.setHovered(hoveredIdx);
+ hoveredIndexRef.current = hoveredIdx;
+
+ requestRender();
+ }, [embeddings, hoveredId, hoverEnabled, idToIndex, requestRender, selectedIds]);
+
+ // Resize handling
+ useEffect(() => {
+ const container = containerRef.current;
+ if (!container) return;
+
+ const resize = () => {
+ const rect = container.getBoundingClientRect();
+ const width = Math.floor(rect.width);
+ const height = Math.floor(rect.height);
+ if (!(width > 0) || !(height > 0)) return;
+
+ if (overlayCanvasRef.current) {
+ overlayCanvasRef.current.width = Math.max(1, width);
+ overlayCanvasRef.current.height = Math.max(1, height);
+ overlayCanvasRef.current.style.width = `${width}px`;
+ overlayCanvasRef.current.style.height = `${height}px`;
+ redrawOverlay();
+ }
+
+ const renderer = rendererRef.current;
+ if (renderer) {
+ renderer.resize(width, height);
+ requestRender();
+ }
+ };
+
+ resize();
+
+ const ro = new ResizeObserver(resize);
+ ro.observe(container);
+ return () => ro.disconnect();
+ }, [redrawOverlay, requestRender]);
+
+ // Wheel zoom (native listener so we can set passive:false)
+ useEffect(() => {
+ const canvas = canvasRef.current;
+ if (!canvas) return;
+
+ const onWheel = (e: WheelEvent) => {
+ const renderer = rendererRef.current;
+ if (!renderer) return;
+ e.preventDefault();
+
+ const pos = getCanvasPos(e);
+ const delta = -e.deltaY / 100;
+ renderer.zoom(pos.x, pos.y, delta, toModifiers(e));
+ requestRender();
+ };
+
+ canvas.addEventListener("wheel", onWheel, { passive: false });
+ return () => canvas.removeEventListener("wheel", onWheel);
+ }, [getCanvasPos, requestRender]);
+
+ // Pointer interactions
+ const handlePointerDown = useCallback(
+ (e: React.PointerEvent) => {
+ const renderer = rendererRef.current;
+ if (!renderer) return;
+
+ // Left button only
+ if (typeof e.button === "number" && e.button !== 0) return;
+
+ const pos = getCanvasPos(e);
+ pointerDownXRef.current = pos.x;
+ pointerDownYRef.current = pos.y;
+ lastPointerXRef.current = pos.x;
+ lastPointerYRef.current = pos.y;
+
+ if (persistentLassoRef.current) {
+ clearPersistentLasso();
+ }
+
+ // Shift-drag = lasso, otherwise pan.
+ if (e.shiftKey) {
+ isLassoingRef.current = true;
+ isPanningRef.current = false;
+ lassoPointsRef.current = [pos.x, pos.y];
+ drawLassoOverlay(overlayCanvasRef.current, lassoPointsRef.current);
+ } else {
+ isPanningRef.current = true;
+ isLassoingRef.current = false;
+ }
+
+ try {
+ e.currentTarget.setPointerCapture(e.pointerId);
+ } catch {
+ // ignore
+ }
+
+ e.preventDefault();
+ },
+ [clearPersistentLasso, getCanvasPos]
+ );
+
+ const handlePointerMove = useCallback(
+ (e: React.PointerEvent) => {
+ const renderer = rendererRef.current;
+ if (!renderer) return;
+
+ const pos = getCanvasPos(e);
+
+ if (isPanningRef.current) {
+ const dx = pos.x - lastPointerXRef.current;
+ const dy = pos.y - lastPointerYRef.current;
+ lastPointerXRef.current = pos.x;
+ lastPointerYRef.current = pos.y;
+
+ renderer.pan(dx, dy, toModifiers(e));
+ requestRender();
+ return;
+ }
+
+ if (isLassoingRef.current) {
+ const pts = lassoPointsRef.current;
+ const lastX = pts[pts.length - 2] ?? pos.x;
+ const lastY = pts[pts.length - 1] ?? pos.y;
+ const ddx = pos.x - lastX;
+ const ddy = pos.y - lastY;
+ const distSq = ddx * ddx + ddy * ddy;
+
+ // Sample at ~2px spacing
+ if (distSq >= 4) {
+ pts.push(pos.x, pos.y);
+ drawLassoOverlay(overlayCanvasRef.current, pts);
+ }
+ return;
+ }
+
+ if (!hoverEnabled) {
+ if (hoveredIndexRef.current !== -1) {
+ hoveredIndexRef.current = -1;
+ renderer.setHovered(-1);
+ requestRender();
+ }
+ return;
+ }
+
+ // Hover
+ const hit = renderer.hitTest(pos.x, pos.y);
+ const nextIndex = hit ? hit.index : -1;
+ if (nextIndex === hoveredIndexRef.current) return;
+ hoveredIndexRef.current = nextIndex;
+ renderer.setHovered(nextIndex);
+
+ if (!embeddings) return;
+ if (nextIndex >= 0 && nextIndex < embeddings.ids.length) {
+ setHoveredId(embeddings.ids[nextIndex]);
+ } else {
+ setHoveredId(null);
+ }
+
+ requestRender();
+ },
+ [embeddings, getCanvasPos, hoverEnabled, requestRender, setHoveredId]
+ );
+
+ const handlePointerUp = useCallback(
+ async (e: React.PointerEvent) => {
+ const renderer = rendererRef.current;
+ if (!renderer || !embeddings) {
+ stopInteraction();
+ return;
+ }
+
+ if (isLassoingRef.current) {
+ const pts = lassoPointsRef.current.slice();
+ persistentLassoRef.current = pts.length >= 6 ? pts : null;
+ stopInteraction();
+ redrawOverlay();
+
+ if (pts.length >= 6) {
+ try {
+ const polyline = new Float32Array(pts);
+ const result = renderer.lassoSelect(polyline);
+
+ // Enter server-driven lasso mode by sending a data-space polygon.
+ // Backend selection runs in the same coordinate system returned by /api/embeddings.
+ const dataCoords = result.geometry?.coords;
+ if (!dataCoords || dataCoords.length < 6) return;
+
+ // Clear any existing manual selection highlights immediately.
+ renderer.setSelection(new Set<number>());
+
+ // Cap vertex count to keep request payload + backend runtime bounded.
+ const polygon = capInterleavedXY(dataCoords, MAX_LASSO_VERTS);
+ if (polygon.length < 6) return;
+
+ beginLassoSelection({ layoutKey: embeddings.layout_key, polygon });
+ } catch (err) {
+ console.error("Lasso selection failed:", err);
+ }
+ }
+
+ requestRender();
+ return;
+ }
+
+ // Click-to-select (scatter -> image grid)
+ // Only treat as a click if the pointer didn't move much (otherwise it's a pan).
+ const pos = getCanvasPos(e);
+ const dx = pos.x - pointerDownXRef.current;
+ const dy = pos.y - pointerDownYRef.current;
+ const CLICK_MAX_DIST_SQ = 36; // ~6px
+ const isClick = dx * dx + dy * dy <= CLICK_MAX_DIST_SQ;
+
+ if (isClick) {
+ const hit = renderer.hitTest(pos.x, pos.y);
+ const idx = hit ? hit.index : -1;
+
+ if (idx >= 0 && idx < embeddings.ids.length) {
+ const id = embeddings.ids[idx];
+
+ if (e.metaKey || e.ctrlKey) {
+ const next = new Set(selectedIds);
+ if (next.has(id)) next.delete(id);
+ else next.add(id);
+ setSelectedIds(next, "scatter");
+ } else {
+ setSelectedIds(new Set([id]), "scatter");
+ }
+ }
+ }
+
+ stopInteraction();
+ requestRender();
+ },
+ [
+ beginLassoSelection,
+ embeddings,
+ getCanvasPos,
+ redrawOverlay,
+ requestRender,
+ selectedIds,
+ setSelectedIds,
+ stopInteraction,
+ ]
+ );
+
+ const handlePointerLeave = useCallback(
+ (_e: React.PointerEvent) => {
+ const renderer = rendererRef.current;
+ if (renderer) {
+ hoveredIndexRef.current = -1;
+ setHoveredId(null);
+ renderer.setHovered(-1);
+ requestRender();
+ }
+ stopInteraction();
+ },
+ [requestRender, setHoveredId, stopInteraction]
+ );
+
+ const handleDoubleClick = useCallback(
+ (_e: React.MouseEvent) => {
+ const renderer = rendererRef.current;
+ if (!renderer) return;
+ clearPersistentLasso();
+ stopInteraction();
+
+ renderer.setSelection(new Set<number>());
+ setSelectedIds(new Set<string>(), "scatter");
+
+ requestRender();
+ },
+ [clearPersistentLasso, requestRender, setSelectedIds, stopInteraction]
+ );
+
+ return {
+ canvasRef,
+ overlayCanvasRef,
+ containerRef,
+ handlePointerDown,
+ handlePointerMove,
+ handlePointerUp,
+ handlePointerLeave,
+ handleDoubleClick,
+ rendererError,
+ };
+}
diff --git a/frontend/src/components/useLabelLegend.ts b/frontend/src/components/useLabelLegend.ts
new file mode 100644
index 0000000..f14ee8c
--- /dev/null
+++ b/frontend/src/components/useLabelLegend.ts
@@ -0,0 +1,74 @@
+import { useMemo } from "react";
+
+import type { DatasetInfo, EmbeddingsData } from "@/types";
+import {
+ buildLabelColorMap,
+ buildLabelCounts,
+ buildLabelUniverse,
+ buildLabelsInfo,
+ buildLegendLabels,
+} from "@/lib/labelLegend";
+import { useColorSettings } from "@/store/useColorSettings";
+
+interface UseLabelLegendArgs {
+ datasetInfo: DatasetInfo | null;
+ embeddings: EmbeddingsData | null;
+ labelSearch?: string;
+ labelFilter?: string | null;
+}
+
+export function useLabelLegend({
+ datasetInfo,
+ embeddings,
+ labelSearch = "",
+ labelFilter = null,
+}: UseLabelLegendArgs) {
+ const labelColorMapId = useColorSettings((state) => state.labelColorMapId);
+
+ const labelCounts = useMemo(() => buildLabelCounts(embeddings), [embeddings]);
+
+ const labelUniverse = useMemo(
+ () => buildLabelUniverse(datasetInfo?.labels ?? [], embeddings?.labels ?? null),
+ [datasetInfo?.labels, embeddings?.labels]
+ );
+
+ const labelsInfo = useMemo(
+ () =>
+ buildLabelsInfo({
+ datasetLabels: datasetInfo?.labels ?? [],
+ embeddings,
+ labelColorMapId,
+ labelFilter,
+ }),
+ [datasetInfo?.labels, embeddings, labelColorMapId, labelFilter]
+ );
+
+ const labelColorMap = useMemo(
+ () =>
+ buildLabelColorMap({
+ labelsInfo,
+ labelUniverse,
+ labelColorMapId,
+ labelFilter,
+ }),
+ [labelsInfo, labelUniverse, labelColorMapId, labelFilter]
+ );
+
+ const legendLabels = useMemo(
+ () =>
+ buildLegendLabels({
+ labelUniverse,
+ labelCounts,
+ query: labelSearch,
+ }),
+ [labelUniverse, labelCounts, labelSearch]
+ );
+
+ return {
+ labelCounts,
+ labelUniverse,
+ labelsInfo,
+ labelColorMap,
+ legendLabels,
+ };
+}
diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts
index c923cd3..1d420dc 100644
--- a/frontend/src/lib/api.ts
+++ b/frontend/src/lib/api.ts
@@ -1,6 +1,15 @@
-import type { DatasetInfo, EmbeddingsData, Sample, SamplesResponse } from "@/types";
+import type {
+ CurationFilterRequest,
+ CurationFilterResponse,
+ CurationStats,
+ DatasetInfo,
+ EmbeddingsData,
+ Sample,
+ SamplesResponse,
+ VideoAnnotation,
+} from "@/types";
-const API_BASE = process.env.NODE_ENV === "development" ? "http://127.0.0.1:5151" : "";
+const API_BASE = process.env.NODE_ENV === "development" ? "http://127.0.0.1:6263" : "";
export async function fetchDataset(): Promise<DatasetInfo> {
const res = await fetch(`${API_BASE}/api/dataset`);
@@ -30,8 +39,13 @@ export async function fetchSamples(
return res.json();
}
-export async function fetchEmbeddings(): Promise<EmbeddingsData> {
- const res = await fetch(`${API_BASE}/api/embeddings`);
+export async function fetchEmbeddings(layoutKey?: string): Promise<EmbeddingsData> {
+ const params = new URLSearchParams();
+ if (layoutKey) {
+ params.set("layout_key", layoutKey);
+ }
+ const query = params.toString();
+ const res = await fetch(`${API_BASE}/api/embeddings${query ? `?${query}` : ""}`);
if (!res.ok) {
throw new Error(`Failed to fetch embeddings: ${res.statusText}`);
}
@@ -60,3 +74,76 @@ export async function fetchSamplesBatch(sampleIds: string[]): Promise
const data = await res.json();
return data.samples;
}
+
+export interface LassoSelectionResponse {
+ total: number;
+ offset: number;
+ limit: number;
+ sample_ids: string[];
+ samples: Sample[];
+}
+
+export async function fetchLassoSelection(args: {
+ layoutKey: string;
+ polygon: ArrayLike<number>;
+ offset?: number;
+ limit?: number;
+ includeThumbnails?: boolean;
+ signal?: AbortSignal;
+}): Promise<LassoSelectionResponse> {
+ const res = await fetch(`${API_BASE}/api/selection/lasso`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({
+ layout_key: args.layoutKey,
+ polygon: Array.from(args.polygon),
+ offset: args.offset ?? 0,
+ limit: args.limit ?? 100,
+ include_thumbnails: args.includeThumbnails ?? true,
+ }),
+ signal: args.signal,
+ });
+ if (!res.ok) {
+ throw new Error(`Failed to fetch lasso selection: ${res.statusText}`);
+ }
+ return res.json();
+}
+
+export function getVideoUrl(sampleId: string): string {
+ return `${API_BASE}/api/video/${sampleId}`;
+}
+
+export async function fetchAnnotations(sampleId: string): Promise<VideoAnnotation> {
+ const res = await fetch(`${API_BASE}/api/annotations/${sampleId}`);
+ if (!res.ok) {
+ throw new Error(`Failed to fetch annotations: ${res.statusText}`);
+ }
+ return res.json();
+}
+
+export async function fetchCurationStats(): Promise<CurationStats> {
+ const res = await fetch(`${API_BASE}/api/curation/stats`);
+ if (!res.ok) {
+ throw new Error(`Failed to fetch curation stats: ${res.statusText}`);
+ }
+ return res.json();
+}
+
+export async function postCurationFilter(
+ request: CurationFilterRequest
+): Promise<CurationFilterResponse> {
+ const res = await fetch(`${API_BASE}/api/curation/filter`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(request),
+ });
+
+ if (!res.ok) {
+ throw new Error(`Failed to filter curation samples: ${res.statusText}`);
+ }
+ return res.json();
+}
diff --git a/frontend/src/lib/colorTransfer.ts b/frontend/src/lib/colorTransfer.ts
new file mode 100644
index 0000000..fab7532
--- /dev/null
+++ b/frontend/src/lib/colorTransfer.ts
@@ -0,0 +1,26 @@
+import {
+ FALLBACK_LABEL_COLOR,
+ createLabelColorMap,
+ normalizeLabel,
+ type LabelColorMapId,
+} from "@/lib/labelColors";
+
+export interface CategoricalLabelTransferFunction {
+ kind: "categorical";
+ paletteId: LabelColorMapId;
+ colorFor: (label: string | null | undefined) => string;
+}
+
+export function createCategoricalLabelTransferFunction(params: {
+ labels: string[];
+ paletteId: LabelColorMapId;
+}): CategoricalLabelTransferFunction {
+ const { labels, paletteId } = params;
+ const colorMap = createLabelColorMap(labels, { paletteId });
+
+ return {
+ kind: "categorical",
+ paletteId,
+ colorFor: (label) => colorMap[normalizeLabel(label)] ?? FALLBACK_LABEL_COLOR,
+ };
+}
diff --git a/frontend/src/lib/labelColors.ts b/frontend/src/lib/labelColors.ts
new file mode 100644
index 0000000..d38e64b
--- /dev/null
+++ b/frontend/src/lib/labelColors.ts
@@ -0,0 +1,241 @@
+const MISSING_LABEL_SENTINEL = "undefined";
+
+export const MISSING_LABEL_COLOR = "#39d3cc"; // matches --accent-cyan
+export const FALLBACK_LABEL_COLOR = "#8b949e"; // matches --muted-foreground
+
+export const LABEL_COLOR_MAP_IDS = ["auto", "classic20", "tab10", "tab20", "wong"] as const;
+export type LabelColorMapId = (typeof LABEL_COLOR_MAP_IDS)[number];
+
+const AUTO_DEFAULT_PALETTE_ID: LabelColorMapId = "tab20";
+const OVERFLOW_HUE_STEP_DEGREES = 137.508;
+
+export function isLabelColorMapId(value: string): value is LabelColorMapId {
+ return LABEL_COLOR_MAP_IDS.includes(value as LabelColorMapId);
+}
+
+const CLASSIC_20_LABEL_PALETTE = [
+ "#e6194b",
+ "#3cb44b",
+ "#ffe119",
+ "#4363d8",
+ "#f58231",
+ "#911eb4",
+ "#46f0f0",
+ "#f032e6",
+ "#bcf60c",
+ "#fabebe",
+ "#008080",
+ "#e6beff",
+ "#9a6324",
+ "#fffac8",
+ "#800000",
+ "#aaffc3",
+ "#808000",
+ "#ffd8b1",
+ "#000075",
+ "#808080",
+];
+
+const TAB_10_LABEL_PALETTE = [
+ "#4e79a7",
+ "#f28e2b",
+ "#e15759",
+ "#76b7b2",
+ "#59a14f",
+ "#edc948",
+ "#b07aa1",
+ "#ff9da7",
+ "#9c755f",
+ "#bab0ab",
+];
+
+const TAB_20_LABEL_PALETTE = [
+ "#1f77b4",
+ "#aec7e8",
+ "#ff7f0e",
+ "#ffbb78",
+ "#2ca02c",
+ "#98df8a",
+ "#d62728",
+ "#ff9896",
+ "#9467bd",
+ "#c5b0d5",
+ "#8c564b",
+ "#c49c94",
+ "#e377c2",
+ "#f7b6d2",
+ "#7f7f7f",
+ "#c7c7c7",
+ "#bcbd22",
+ "#dbdb8d",
+ "#17becf",
+ "#9edae5",
+];
+
+const WONG_LABEL_PALETTE = [
+ "#e69f00",
+ "#56b4e9",
+ "#009e73",
+ "#f0e442",
+ "#0072b2",
+ "#d55e00",
+ "#cc79a7",
+];
+
+function clamp01(v: number): number {
+ if (v < 0) return 0;
+ if (v > 1) return 1;
+ return v;
+}
+
+function hslToHex(hDegrees: number, saturation: number, lightness: number): string {
+ const h = ((((hDegrees % 360) + 360) % 360) / 360) % 1;
+ const s = clamp01(saturation);
+ const l = clamp01(lightness);
+
+ let r = l;
+ let g = l;
+ let b = l;
+
+ if (s > 0) {
+ const q = l < 0.5 ? l * (1 + s) : l + s - l * s;
+ const p = 2 * l - q;
+
+ const hueToChannel = (tIn: number): number => {
+ let t = tIn;
+ if (t < 0) t += 1;
+ if (t > 1) t -= 1;
+ if (t < 1 / 6) return p + (q - p) * 6 * t;
+ if (t < 1 / 2) return q;
+ if (t < 2 / 3) return p + (q - p) * (2 / 3 - t) * 6;
+ return p;
+ };
+
+ r = hueToChannel(h + 1 / 3);
+ g = hueToChannel(h);
+ b = hueToChannel(h - 1 / 3);
+ }
+
+ const toHex = (x: number) => {
+ const n = Math.round(clamp01(x) * 255);
+ return n.toString(16).padStart(2, "0");
+ };
+
+ return `#${toHex(r)}${toHex(g)}${toHex(b)}`;
+}
+
+function createOverflowColor(index: number, seedPrefix: LabelColorMapId): string {
+ const paletteSeedShift =
+ seedPrefix === "tab10"
+ ? 17
+ : seedPrefix === "tab20"
+ ? 43
+ : seedPrefix === "wong"
+ ? 71
+ : seedPrefix === "classic20"
+ ? 101
+ : 131;
+
+ const hue = (paletteSeedShift + index * OVERFLOW_HUE_STEP_DEGREES) % 360;
+ const saturationBands = [0.72, 0.64, 0.78];
+ const lightnessBands = [0.46, 0.54, 0.38, 0.62];
+
+ const saturation = saturationBands[index % saturationBands.length];
+ const lightness =
+ lightnessBands[Math.floor(index / saturationBands.length) % lightnessBands.length];
+
+ return hslToHex(hue, saturation, lightness);
+}
+
+function stableLabelSort(a: string, b: string): number {
+ if (a === MISSING_LABEL_SENTINEL && b !== MISSING_LABEL_SENTINEL) return 1;
+ if (b === MISSING_LABEL_SENTINEL && a !== MISSING_LABEL_SENTINEL) return -1;
+ return a.localeCompare(b);
+}
+
+function createAutoLabelColorMap(unique: string[]): Record<string, string> {
+ return createPaletteLabelColorMap(unique, TAB_20_LABEL_PALETTE, AUTO_DEFAULT_PALETTE_ID);
+}
+
+function createPaletteLabelColorMap(
+ unique: string[],
+ palette: string[],
+ seedPrefix: LabelColorMapId
+): Record<string, string> {
+ const colors: Record<string, string> = {};
+ const used = new Set<string>();
+ let nonMissingIndex = 0;
+ let overflowIndex = 0;
+
+ for (const label of unique) {
+ if (label === MISSING_LABEL_SENTINEL) {
+ colors[label] = MISSING_LABEL_COLOR;
+ used.add(MISSING_LABEL_COLOR.toLowerCase());
+ continue;
+ }
+
+ let candidate =
+ nonMissingIndex < palette.length
+ ? (palette[nonMissingIndex] ?? FALLBACK_LABEL_COLOR)
+ : createOverflowColor(overflowIndex, seedPrefix);
+
+ let safety = 0;
+ while (used.has(candidate.toLowerCase()) && safety < 2048) {
+ overflowIndex += 1;
+ candidate = createOverflowColor(overflowIndex, seedPrefix);
+ safety += 1;
+ }
+
+ if (nonMissingIndex >= palette.length) {
+ overflowIndex += 1;
+ }
+
+ if (used.has(candidate.toLowerCase())) {
+ candidate = FALLBACK_LABEL_COLOR;
+ }
+
+ colors[label] = candidate;
+ used.add(candidate.toLowerCase());
+ nonMissingIndex += 1;
+ }
+
+ return colors;
+}
+
+/**
+ * Builds a deterministic label → color mapping for the given label universe.
+ *
+ * Notes:
+ * - `auto` mode uses `tab20` as the default categorical palette.
+ * - `classic20`, `tab10`, `tab20`, and `wong` use fixed categorical palettes
+ * first, then deterministic overflow colors for additional labels.
+ */
+export function createLabelColorMap(
+ labels: string[],
+ options?: { paletteId?: LabelColorMapId }
+): Record<string, string> {
+ const unique = Array.from(new Set(labels.map((l) => normalizeLabel(l)))).sort(stableLabelSort);
+ const paletteId = options?.paletteId ?? "auto";
+
+ if (paletteId === "classic20") {
+ return createPaletteLabelColorMap(unique, CLASSIC_20_LABEL_PALETTE, "classic20");
+ }
+
+ if (paletteId === "tab10") {
+ return createPaletteLabelColorMap(unique, TAB_10_LABEL_PALETTE, "tab10");
+ }
+
+ if (paletteId === "tab20") {
+ return createPaletteLabelColorMap(unique, TAB_20_LABEL_PALETTE, "tab20");
+ }
+
+ if (paletteId === "wong") {
+ return createPaletteLabelColorMap(unique, WONG_LABEL_PALETTE, "wong");
+ }
+
+ return createAutoLabelColorMap(unique);
+}
+
+export function normalizeLabel(label: string | null | undefined): string {
+ return label && label.length > 0 ? label : MISSING_LABEL_SENTINEL;
+}
diff --git a/frontend/src/lib/labelLegend.ts b/frontend/src/lib/labelLegend.ts
new file mode 100644
index 0000000..22546e3
--- /dev/null
+++ b/frontend/src/lib/labelLegend.ts
@@ -0,0 +1,234 @@
+import type { EmbeddingsData } from "@/types";
+import {
+ FALLBACK_LABEL_COLOR,
+ MISSING_LABEL_COLOR,
+ normalizeLabel,
+ type LabelColorMapId,
+} from "@/lib/labelColors";
+import { createCategoricalLabelTransferFunction } from "@/lib/colorTransfer";
+
+export const UNSELECTED_LABEL_ALPHA = 0.12;
+
+export interface ScatterLabelsInfo {
+ uniqueLabels: string[];
+ categories: Uint16Array;
+ palette: string[];
+}
+
+function clamp01(v: number): number {
+ if (v < 0) return 0;
+ if (v > 1) return 1;
+ return v;
+}
+
+function applyAlphaToHex(color: string, alpha: number): string {
+ if (!color.startsWith("#")) return color;
+ const hex = Math.round(clamp01(alpha) * 255)
+ .toString(16)
+ .padStart(2, "0");
+
+ if (color.length === 7) {
+ return `${color}${hex}`;
+ }
+
+ if (color.length === 9) {
+ return `${color.slice(0, 7)}${hex}`;
+ }
+
+ return color;
+}
+
+function applyLabelFilterToPalette(params: {
+ palette: string[];
+ labels: string[];
+ labelFilter: string | null;
+ unselectedAlpha: number;
+}): string[] {
+ const { palette, labels, labelFilter, unselectedAlpha } = params;
+ if (!labelFilter) return palette;
+ if (!labels.includes(labelFilter)) return palette;
+
+ return palette.map((color, idx) =>
+ labels[idx] === labelFilter ? color : applyAlphaToHex(color, unselectedAlpha)
+ );
+}
+
+export function buildLabelCounts(embeddings: EmbeddingsData | null): Map<string, number> {
+ const counts = new Map<string, number>();
+ if (!embeddings) return counts;
+ for (const raw of embeddings.labels) {
+ const l = normalizeLabel(raw);
+ counts.set(l, (counts.get(l) ?? 0) + 1);
+ }
+ return counts;
+}
+
+export function buildLabelUniverse(
+ datasetLabels: string[],
+ embeddingsLabels: (string | null)[] | null
+): string[] {
+ const universe: string[] = [];
+ const seen = new Set<string>();
+ let hasMissing = false;
+
+ const baseLabels = datasetLabels.map((l) => normalizeLabel(l));
+ for (const l of baseLabels) {
+ if (l === "undefined") {
+ hasMissing = true;
+ continue;
+ }
+ if (seen.has(l)) continue;
+ seen.add(l);
+ universe.push(l);
+ }
+
+ if (embeddingsLabels) {
+ const extras = new Set<string>();
+ for (const raw of embeddingsLabels) {
+ const l = normalizeLabel(raw);
+ if (l === "undefined") {
+ hasMissing = true;
+ continue;
+ }
+ if (!seen.has(l)) extras.add(l);
+ }
+
+ if (extras.size > 0) {
+ const extraSorted = Array.from(extras).sort((a, b) => a.localeCompare(b));
+ for (const l of extraSorted) {
+ seen.add(l);
+ universe.push(l);
+ }
+ }
+ }
+
+ if (hasMissing) universe.push("undefined");
+ return universe;
+}
+
+export function buildLabelsInfo(params: {
+ datasetLabels: string[];
+ embeddings: EmbeddingsData | null;
+ labelColorMapId: LabelColorMapId;
+ labelFilter?: string | null;
+ unselectedAlpha?: number;
+}): ScatterLabelsInfo | null {
+ const {
+ datasetLabels,
+ embeddings,
+ labelColorMapId,
+ labelFilter = null,
+ unselectedAlpha = UNSELECTED_LABEL_ALPHA,
+ } = params;
+ if (!embeddings) return null;
+
+ const universe = buildLabelUniverse(datasetLabels, embeddings.labels);
+
+ // Guard: hyper-scatter categories are Uint16.
+ if (universe.length > 65535) {
+ console.warn(
+ `Too many labels (${universe.length}) for uint16 categories; collapsing to a single color.`
+ );
+ return {
+ uniqueLabels: ["undefined"],
+ categories: new Uint16Array(embeddings.labels.length),
+ palette: [FALLBACK_LABEL_COLOR],
+ };
+ }
+
+ const labelToCategory: Record<string, number> = {};
+ for (let i = 0; i < universe.length; i++) {
+ labelToCategory[universe[i]] = i;
+ }
+
+ const undefinedIndex = labelToCategory["undefined"] ?? 0;
+ const categories = new Uint16Array(embeddings.labels.length);
+ for (let i = 0; i < embeddings.labels.length; i++) {
+ const key = normalizeLabel(embeddings.labels[i]);
+ categories[i] = labelToCategory[key] ?? undefinedIndex;
+ }
+
+ const transfer = createCategoricalLabelTransferFunction({
+ labels: universe,
+ paletteId: labelColorMapId,
+ });
+ const palette = universe.map((l) => transfer.colorFor(l));
+
+ const filteredPalette = applyLabelFilterToPalette({
+ palette,
+ labels: universe,
+ labelFilter,
+ unselectedAlpha,
+ });
+
+ return { uniqueLabels: universe, categories, palette: filteredPalette };
+}
+
+export function buildLabelColorMap(params: {
+ labelsInfo: ScatterLabelsInfo | null;
+ labelUniverse: string[];
+ labelColorMapId: LabelColorMapId;
+ labelFilter?: string | null;
+ unselectedAlpha?: number;
+}): Record<string, string> {
+ const {
+ labelsInfo,
+ labelUniverse,
+ labelColorMapId,
+ labelFilter = null,
+ unselectedAlpha = UNSELECTED_LABEL_ALPHA,
+ } = params;
+ const map: Record<string, string> = {};
+
+ if (labelsInfo) {
+ for (let i = 0; i < labelsInfo.uniqueLabels.length; i++) {
+ map[labelsInfo.uniqueLabels[i]] = labelsInfo.palette[i] ?? FALLBACK_LABEL_COLOR;
+ }
+ return map;
+ }
+
+ if (labelUniverse.length === 0) return map;
+
+ const transfer = createCategoricalLabelTransferFunction({
+ labels: labelUniverse,
+ paletteId: labelColorMapId,
+ });
+ for (const label of labelUniverse) {
+ map[label] = transfer.colorFor(label);
+ }
+
+ if (!labelFilter || !labelUniverse.includes(labelFilter)) return map;
+
+ for (const label of labelUniverse) {
+ if (label !== labelFilter) {
+ map[label] = applyAlphaToHex(map[label], unselectedAlpha);
+ }
+ }
+
+ return map;
+}
+
+export function buildLegendLabels(params: {
+ labelUniverse: string[];
+ labelCounts: Map<string, number>;
+ query: string;
+}): string[] {
+ const { labelUniverse, labelCounts, query } = params;
+ const all = labelUniverse.length > 0 ? [...labelUniverse] : Array.from(labelCounts.keys());
+ const q = query.trim().toLowerCase();
+ const filtered = q ? all.filter((l) => l.toLowerCase().includes(q)) : all;
+ const hasCounts = labelCounts.size > 0;
+
+ return filtered.sort((a, b) => {
+ if (a === "undefined" && b !== "undefined") return 1;
+ if (b === "undefined" && a !== "undefined") return -1;
+
+ if (hasCounts) {
+ const ca = labelCounts.get(a) ?? 0;
+ const cb = labelCounts.get(b) ?? 0;
+ if (cb !== ca) return cb - ca;
+ }
+
+ return a.localeCompare(b);
+ });
+}
diff --git a/frontend/src/lib/layouts.ts b/frontend/src/lib/layouts.ts
new file mode 100644
index 0000000..2f02e92
--- /dev/null
+++ b/frontend/src/lib/layouts.ts
@@ -0,0 +1,17 @@
+import type { Geometry, LayoutInfo } from "@/types";
+
+export function listAvailableGeometries(layouts: LayoutInfo[]): Geometry[] {
+ const geometries = new Set<Geometry>();
+ for (const layout of layouts) {
+ geometries.add(layout.geometry);
+ }
+ return Array.from(geometries);
+}
+
+export function findLayoutByGeometry(layouts: LayoutInfo[], geometry: Geometry): LayoutInfo | undefined {
+ return layouts.find((l) => l.geometry === geometry);
+}
+
+export function findLayoutByKey(layouts: LayoutInfo[], layoutKey: string): LayoutInfo | undefined {
+ return layouts.find((l) => l.layout_key === layoutKey);
+}
diff --git a/frontend/src/lib/utils.ts b/frontend/src/lib/utils.ts
new file mode 100644
index 0000000..bd0c391
--- /dev/null
+++ b/frontend/src/lib/utils.ts
@@ -0,0 +1,6 @@
+import { clsx, type ClassValue } from "clsx"
+import { twMerge } from "tailwind-merge"
+
+export function cn(...inputs: ClassValue[]) {
+ return twMerge(clsx(inputs))
+}
diff --git a/frontend/src/store/useColorSettings.ts b/frontend/src/store/useColorSettings.ts
new file mode 100644
index 0000000..56d0971
--- /dev/null
+++ b/frontend/src/store/useColorSettings.ts
@@ -0,0 +1,36 @@
+import { create } from "zustand";
+import { createJSONStorage, persist } from "zustand/middleware";
+
+import type { LabelColorMapId } from "@/lib/labelColors";
+
+export interface LabelColorMapOption {
+ value: LabelColorMapId;
+ label: string;
+}
+
+export const LABEL_COLOR_MAP_OPTIONS: LabelColorMapOption[] = [
+ { value: "auto", label: "Auto" },
+ { value: "tab20", label: "Tab 20" },
+ { value: "tab10", label: "Tab 10" },
+ { value: "wong", label: "Wong" },
+ { value: "classic20", label: "Classic 20" },
+];
+
+interface ColorSettingsState {
+ labelColorMapId: LabelColorMapId;
+ setLabelColorMapId: (value: LabelColorMapId) => void;
+}
+
+export const useColorSettings = create<ColorSettingsState>()(
+ persist(
+ (set) => ({
+ labelColorMapId: "auto",
+ setLabelColorMapId: (value) => set({ labelColorMapId: value }),
+ }),
+ {
+ name: "hyperview-color-settings",
+ version: 1,
+ storage: createJSONStorage(() => localStorage),
+ }
+ )
+);
diff --git a/frontend/src/store/useStore.ts b/frontend/src/store/useStore.ts
index 59abb54..6285652 100644
--- a/frontend/src/store/useStore.ts
+++ b/frontend/src/store/useStore.ts
@@ -1,68 +1,138 @@
import { create } from "zustand";
-import type { DatasetInfo, EmbeddingsData, Sample, ViewMode } from "@/types";
+import type {
+ CurationFilterRequest,
+ DatasetInfo,
+ EmbeddingsData,
+ Sample,
+ VideoAnnotation,
+} from "@/types";
+import { normalizeLabel } from "@/lib/labelColors";
+
+type SelectionSource = "scatter" | "grid" | "lasso" | "label" | "curation" | null;
+
+export interface CurationFilterState {
+ minAestheticScore: number;
+ minMotionScore: number;
+ maxCosineSimilarity: number;
+ captionQuery: string;
+ dedupStatus: "all" | "kept" | "removed" | "unknown";
+}
+
+const DEFAULT_CURATION_FILTERS: CurationFilterState = {
+ minAestheticScore: 0,
+ minMotionScore: 0,
+ maxCosineSimilarity: 1,
+ captionQuery: "",
+ dedupStatus: "all",
+};
+
+function computeLabelSelection(embeddings: EmbeddingsData, label: string): Set<string> {
+ const target = normalizeLabel(label);
+ const ids = new Set<string>();
+ for (let i = 0; i < embeddings.labels.length; i++) {
+ if (normalizeLabel(embeddings.labels[i]) === target) {
+ ids.add(embeddings.ids[i]);
+ }
+ }
+ return ids;
+}
interface AppState {
- // Dataset info
+ leftPanelOpen: boolean;
+ rightPanelOpen: boolean;
+ bottomPanelOpen: boolean;
+ setLeftPanelOpen: (open: boolean) => void;
+ setRightPanelOpen: (open: boolean) => void;
+ setBottomPanelOpen: (open: boolean) => void;
+
datasetInfo: DatasetInfo | null;
setDatasetInfo: (info: DatasetInfo) => void;
- // Samples
samples: Sample[];
totalSamples: number;
+ samplesLoaded: number;
setSamples: (samples: Sample[], total: number) => void;
appendSamples: (samples: Sample[]) => void;
addSamplesIfMissing: (samples: Sample[]) => void;
- // Embeddings
- embeddings: EmbeddingsData | null;
- setEmbeddings: (data: EmbeddingsData) => void;
+ embeddingsByLayoutKey: Record<string, EmbeddingsData>;
+ setEmbeddingsForLayout: (layoutKey: string, data: EmbeddingsData) => void;
- // View mode (euclidean or hyperbolic)
- viewMode: ViewMode;
- setViewMode: (mode: ViewMode) => void;
+ activeLayoutKey: string | null;
+ setActiveLayoutKey: (layoutKey: string | null) => void;
+
+ labelFilter: string | null;
+ setLabelFilter: (label: string | null) => void;
- // Selection
selectedIds: Set<string>;
isLassoSelection: boolean;
- setSelectedIds: (ids: Set<string>, isLasso?: boolean) => void;
+ selectionSource: SelectionSource;
+ setSelectedIds: (ids: Set<string>, source?: "scatter" | "grid" | "label" | "curation") => void;
toggleSelection: (id: string) => void;
addToSelection: (ids: string[]) => void;
clearSelection: () => void;
- // Hover state
+ lassoQuery: { layoutKey: string; polygon: number[] } | null;
+ lassoSamples: Sample[];
+ lassoTotal: number;
+ lassoIsLoading: boolean;
+ beginLassoSelection: (query: { layoutKey: string; polygon: number[] }) => void;
+ setLassoResults: (samples: Sample[], total: number, append?: boolean) => void;
+ clearLassoSelection: () => void;
+
+ activeVideoId: string | null;
+ setActiveVideoId: (id: string | null) => void;
+ globalSeekTime: number;
+ setGlobalSeekTime: (seconds: number) => void;
+ isTimelinePlaying: boolean;
+ setTimelinePlaying: (playing: boolean) => void;
+ videoDurations: Record<string, number>;
+ setVideoDuration: (sampleId: string, duration: number) => void;
+ previewVideoId: string | null;
+ setPreviewVideoId: (sampleId: string | null) => void;
+
+ curationFilters: CurationFilterState;
+ setCurationFilters: (patch: Partial) => void;
+ resetCurationFilters: () => void;
+ curationQuery: CurationFilterRequest | null;
+ setCurationQuery: (query: CurationFilterRequest | null) => void;
+
+ annotationCache: Record<string, VideoAnnotation>;
+ cacheAnnotation: (sampleId: string, annotation: VideoAnnotation) => void;
+
hoveredId: string | null;
setHoveredId: (id: string | null) => void;
- // Loading states
isLoading: boolean;
setIsLoading: (loading: boolean) => void;
- // Error state
error: string | null;
setError: (error: string | null) => void;
-
- // Label filter
- filterLabel: string | null;
- setFilterLabel: (label: string | null) => void;
-
- // UI state
- showLabels: boolean;
- setShowLabels: (show: boolean) => void;
}
-export const useStore = create<AppState>((set, get) => ({
- // Dataset info
+export const useStore = create<AppState>((set) => ({
+ leftPanelOpen: false,
+ rightPanelOpen: false,
+ bottomPanelOpen: false,
+ setLeftPanelOpen: (open) => set({ leftPanelOpen: open }),
+ setRightPanelOpen: (open) => set({ rightPanelOpen: open }),
+ setBottomPanelOpen: (open) => set({ bottomPanelOpen: open }),
+
datasetInfo: null,
setDatasetInfo: (info) => set({ datasetInfo: info }),
- // Samples
samples: [],
totalSamples: 0,
- setSamples: (samples, total) => set({ samples, totalSamples: total }),
+ samplesLoaded: 0,
+ setSamples: (samples, total) => set({ samples, totalSamples: total, samplesLoaded: samples.length }),
appendSamples: (newSamples) =>
- set((state) => ({
- samples: [...state.samples, ...newSamples],
- })),
+ set((state) => {
+ const existingIds = new Set(state.samples.map((s) => s.id));
+ const toAdd = newSamples.filter((s) => !existingIds.has(s.id));
+ const samplesLoaded = state.samplesLoaded + newSamples.length;
+ if (toAdd.length === 0) return { samplesLoaded };
+ return { samples: [...state.samples, ...toAdd], samplesLoaded };
+ }),
addSamplesIfMissing: (newSamples) =>
set((state) => {
const existingIds = new Set(state.samples.map((s) => s.id));
@@ -71,18 +141,94 @@ export const useStore = create((set, get) => ({
return { samples: [...state.samples, ...toAdd] };
}),
- // Embeddings
- embeddings: null,
- setEmbeddings: (data) => set({ embeddings: data }),
+ embeddingsByLayoutKey: {},
+ setEmbeddingsForLayout: (layoutKey, data) =>
+ set((state) => {
+ const selectionUpdate =
+ state.labelFilter &&
+ state.selectionSource === "label" &&
+ state.activeLayoutKey === layoutKey
+ ? {
+ selectedIds: computeLabelSelection(data, state.labelFilter),
+ selectionSource: "label" as const,
+ }
+ : {};
+
+ return {
+ embeddingsByLayoutKey: { ...state.embeddingsByLayoutKey, [layoutKey]: data },
+ ...selectionUpdate,
+ };
+ }),
- // View mode
- viewMode: "hyperbolic",
- setViewMode: (mode) => set({ viewMode: mode }),
+ activeLayoutKey: null,
+ setActiveLayoutKey: (layoutKey) =>
+ set((state) => {
+ if (!layoutKey) return { activeLayoutKey: null };
+ if (!state.labelFilter || state.selectionSource !== "label") {
+ return { activeLayoutKey: layoutKey };
+ }
+
+ const embeddings = state.embeddingsByLayoutKey[layoutKey];
+ if (!embeddings) {
+ return {
+ activeLayoutKey: layoutKey,
+ selectedIds: new Set(),
+ selectionSource: "label",
+ };
+ }
+
+ return {
+ activeLayoutKey: layoutKey,
+ selectedIds: computeLabelSelection(embeddings, state.labelFilter),
+ selectionSource: "label",
+ };
+ }),
+
+ labelFilter: null,
+ setLabelFilter: (label) =>
+ set((state) => {
+ const nextLabel = label ? normalizeLabel(label) : null;
+ const nextState: Partial = { labelFilter: nextLabel };
+
+ if (nextLabel) {
+ const layoutKey = state.activeLayoutKey;
+ const embeddings = layoutKey ? state.embeddingsByLayoutKey[layoutKey] : null;
+ nextState.selectedIds = embeddings ? computeLabelSelection(embeddings, nextLabel) : new Set();
+ nextState.selectionSource = "label";
+ nextState.isLassoSelection = false;
+ nextState.lassoQuery = null;
+ nextState.lassoSamples = [];
+ nextState.lassoTotal = 0;
+ nextState.lassoIsLoading = false;
+ } else if (state.selectionSource === "label") {
+ nextState.selectedIds = new Set();
+ nextState.selectionSource = null;
+ }
+
+ return nextState;
+ }),
- // Selection
selectedIds: new Set(),
isLassoSelection: false,
- setSelectedIds: (ids, isLasso = false) => set({ selectedIds: ids, isLassoSelection: isLasso }),
+ selectionSource: null,
+ setSelectedIds: (ids, source = "grid") =>
+ set((state) => {
+ const nextActive =
+ ids.size > 0 && source !== "label"
+ ? (Array.from(ids)[0] ?? state.activeVideoId)
+ : state.activeVideoId;
+
+ return {
+ selectedIds: ids,
+ selectionSource: ids.size > 0 ? source : null,
+ isLassoSelection: false,
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
+ activeVideoId: nextActive,
+ };
+ }),
toggleSelection: (id) =>
set((state) => {
const newSet = new Set(state.selectedIds);
@@ -91,35 +237,124 @@ export const useStore = create((set, get) => ({
} else {
newSet.add(id);
}
- // Manual selection from image grid, not lasso
- return { selectedIds: newSet, isLassoSelection: false };
+ return {
+ selectedIds: newSet,
+ selectionSource: newSet.size > 0 ? "grid" : null,
+ isLassoSelection: false,
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
+ activeVideoId:
+ newSet.size > 0
+ ? (newSet.has(id) ? id : (Array.from(newSet)[0] ?? state.activeVideoId))
+ : state.activeVideoId,
+ };
}),
addToSelection: (ids) =>
set((state) => {
const newSet = new Set(state.selectedIds);
ids.forEach((id) => newSet.add(id));
- // Manual selection from image grid, not lasso
- return { selectedIds: newSet, isLassoSelection: false };
+ return {
+ selectedIds: newSet,
+ selectionSource: newSet.size > 0 ? "grid" : null,
+ isLassoSelection: false,
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
+ activeVideoId: ids[ids.length - 1] ?? state.activeVideoId,
+ };
+ }),
+ clearSelection: () =>
+ set({
+ selectedIds: new Set(),
+ selectionSource: null,
+ isLassoSelection: false,
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
}),
- clearSelection: () => set({ selectedIds: new Set(), isLassoSelection: false }),
- // Hover
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
+ beginLassoSelection: (query) =>
+ set({
+ isLassoSelection: true,
+ selectedIds: new Set(),
+ selectionSource: "lasso",
+ lassoQuery: query,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: true,
+ }),
+ setLassoResults: (samples, total, append = false) =>
+ set((state) => ({
+ lassoSamples: append ? [...state.lassoSamples, ...samples] : samples,
+ lassoTotal: total,
+ lassoIsLoading: false,
+ })),
+ clearLassoSelection: () =>
+ set({
+ isLassoSelection: false,
+ selectionSource: null,
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
+ }),
+
+ activeVideoId: null,
+ setActiveVideoId: (id) => set({ activeVideoId: id }),
+ globalSeekTime: 0,
+ setGlobalSeekTime: (seconds) => set({ globalSeekTime: Math.max(0, seconds) }),
+ isTimelinePlaying: false,
+ setTimelinePlaying: (playing) => set({ isTimelinePlaying: playing }),
+ videoDurations: {},
+ setVideoDuration: (sampleId, duration) =>
+ set((state) => ({
+ videoDurations: {
+ ...state.videoDurations,
+ [sampleId]: Math.max(0, duration),
+ },
+ })),
+ previewVideoId: null,
+ setPreviewVideoId: (sampleId) => set({ previewVideoId: sampleId }),
+
+ curationFilters: DEFAULT_CURATION_FILTERS,
+ setCurationFilters: (patch) =>
+ set((state) => ({ curationFilters: { ...state.curationFilters, ...patch } })),
+ resetCurationFilters: () => set({ curationFilters: { ...DEFAULT_CURATION_FILTERS }, curationQuery: null }),
+ curationQuery: null,
+ setCurationQuery: (query) =>
+ set({
+ curationQuery: query,
+ isLassoSelection: false,
+ lassoQuery: null,
+ lassoSamples: [],
+ lassoTotal: 0,
+ lassoIsLoading: false,
+ selectionSource: query ? "curation" : null,
+ }),
+
+ annotationCache: {},
+ cacheAnnotation: (sampleId, annotation) =>
+ set((state) => ({
+ annotationCache: {
+ ...state.annotationCache,
+ [sampleId]: annotation,
+ },
+ })),
+
hoveredId: null,
setHoveredId: (id) => set({ hoveredId: id }),
- // Loading
isLoading: false,
setIsLoading: (loading) => set({ isLoading: loading }),
- // Error
error: null,
setError: (error) => set({ error }),
-
- // Label filter
- filterLabel: null,
- setFilterLabel: (label) => set({ filterLabel: label }),
-
- // UI state
- showLabels: true,
- setShowLabels: (show) => set({ showLabels: show }),
}));
diff --git a/frontend/src/types/index.ts b/frontend/src/types/index.ts
index bfb2616..8aeb275 100644
--- a/frontend/src/types/index.ts
+++ b/frontend/src/types/index.ts
@@ -5,23 +5,45 @@ export interface Sample {
label: string | null;
thumbnail: string | null;
metadata: Record;
- embedding_2d?: [number, number];
- embedding_2d_hyperbolic?: [number, number];
+ width: number | null;
+ height: number | null;
+}
+
+export type Geometry = "euclidean" | "poincare";
+
+export interface SpaceInfo {
+ space_key: string;
+ model_id: string;
+ dim: number;
+ count: number;
+ provider: string;
+ geometry: Geometry | string;
+ config: Record | null;
+}
+
+export interface LayoutInfo {
+ layout_key: string;
+ space_key: string;
+ method: string;
+ geometry: Geometry;
+ count: number;
+ params: Record | null;
}
export interface DatasetInfo {
name: string;
num_samples: number;
labels: string[];
- label_colors: Record;
+ spaces: SpaceInfo[];
+ layouts: LayoutInfo[];
}
export interface EmbeddingsData {
+ layout_key: string;
+ geometry: Geometry;
ids: string[];
labels: (string | null)[];
- euclidean: [number, number][];
- hyperbolic: [number, number][];
- label_colors: Record;
+ coords: [number, number][];
}
export interface SamplesResponse {
@@ -31,4 +53,64 @@ export interface SamplesResponse {
samples: Sample[];
}
-export type ViewMode = "euclidean" | "hyperbolic";
+export interface VideoAnnotation {
+ id: string;
+ caption: string | null;
+ reasoning: string | null;
+ raw_caption: string | null;
+ aesthetic_score: number | null;
+ motion_score: number | null;
+ dedup_status: string | null;
+ dedup_keep: boolean | null;
+ cosine_sim_score: number | null;
+ source_video: string | null;
+ video_path: string | null;
+ span: [number, number] | number[] | null;
+}
+
+export interface HistogramBin {
+ start: number;
+ end: number;
+ count: number;
+}
+
+export interface ScoreSummary {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+}
+
+export interface CurationStats {
+ total_samples: number;
+ with_video: number;
+ with_caption: number;
+ dedup_counts: Record;
+ score_summary: {
+ aesthetic: ScoreSummary;
+ motion: ScoreSummary;
+ };
+ aesthetic_histogram: HistogramBin[];
+ motion_histogram: HistogramBin[];
+}
+
+export type DedupStatus = "kept" | "removed" | "unknown";
+
+export interface CurationFilterRequest {
+ min_aesthetic_score?: number;
+ min_motion_score?: number;
+ max_cosine_similarity?: number;
+ caption_query?: string;
+ dedup_status?: DedupStatus;
+ offset?: number;
+ limit?: number;
+ include_thumbnails?: boolean;
+}
+
+export interface CurationFilterResponse {
+ total: number;
+ offset: number;
+ limit: number;
+ sample_ids: string[];
+ samples: Sample[];
+}
diff --git a/frontend/tailwind.config.ts b/frontend/tailwind.config.ts
index 7aa3713..9ebc9b5 100644
--- a/frontend/tailwind.config.ts
+++ b/frontend/tailwind.config.ts
@@ -1,6 +1,7 @@
import type { Config } from "tailwindcss";
export default {
+ darkMode: ["class"],
content: [
"./src/pages/**/*.{js,ts,jsx,tsx,mdx}",
"./src/components/**/*.{js,ts,jsx,tsx,mdx}",
@@ -9,17 +10,58 @@ export default {
theme: {
extend: {
colors: {
- // Dark theme colors (shadcn-style with Indigo primary)
- background: "#0a0a0b",
- surface: "#18181b",
- "surface-light": "#27272a",
- border: "#3f3f46",
- primary: "#4F46E5",
- "primary-light": "#818CF8",
- text: "#fafafa",
- "text-muted": "#a1a1aa",
+ // shadcn semantic colors (HSL-based)
+ background: "hsl(var(--background))",
+ foreground: "hsl(var(--foreground))",
+ card: {
+ DEFAULT: "hsl(var(--card))",
+ foreground: "hsl(var(--card-foreground))",
+ },
+ popover: {
+ DEFAULT: "hsl(var(--popover))",
+ foreground: "hsl(var(--popover-foreground))",
+ },
+ primary: {
+ DEFAULT: "hsl(var(--primary))",
+ foreground: "hsl(var(--primary-foreground))",
+ },
+ secondary: {
+ DEFAULT: "hsl(var(--secondary))",
+ foreground: "hsl(var(--secondary-foreground))",
+ },
+ muted: {
+ DEFAULT: "hsl(var(--muted))",
+ foreground: "hsl(var(--muted-foreground))",
+ },
+ accent: {
+ DEFAULT: "hsl(var(--accent))",
+ foreground: "hsl(var(--accent-foreground))",
+ },
+ destructive: {
+ DEFAULT: "hsl(var(--destructive))",
+ foreground: "hsl(var(--destructive-foreground))",
+ },
+ border: "hsl(var(--border))",
+ input: "hsl(var(--input))",
+ ring: "hsl(var(--ring))",
+
+ // Rerun-specific aliases
+ surface: "hsl(var(--surface))",
+ "surface-light": "hsl(var(--surface-light))",
+ "surface-elevated": "hsl(var(--surface-elevated))",
+ "border-subtle": "hsl(var(--border-subtle))",
+ text: "hsl(var(--text))",
+ "text-muted": "hsl(var(--text-muted))",
+ "text-subtle": "hsl(var(--text-subtle))",
+ "accent-cyan": "hsl(var(--accent-cyan))",
+ "accent-orange": "hsl(var(--accent-orange))",
+ },
+ borderRadius: {
+ lg: "var(--radius)",
+ md: "calc(var(--radius) - 2px)",
+ sm: "calc(var(--radius) - 4px)",
},
},
},
- plugins: [],
+ plugins: [require("tailwindcss-animate")],
} satisfies Config;
diff --git a/notebooks/INSTALLATION.md b/notebooks/INSTALLATION.md
new file mode 100644
index 0000000..2e332d8
--- /dev/null
+++ b/notebooks/INSTALLATION.md
@@ -0,0 +1,80 @@
+# Installation Instructions
+
+This guide shows how to set up and run the demo notebook in VSCode.
+
+## Prerequisites
+
+- [uv](https://docs.astral.sh/uv/) package manager installed
+- VSCode with Python extension
+
+## Setup Steps
+
+### 1. Initialize the Project
+
+```bash
+uv init
+```
+
+This creates a new project named `hyperview-demo-notebook`.
+
+### 2. Create Virtual Environment
+
+```bash
+uv venv .venv
+```
+
+This creates a virtual environment using Python 3.13.2 in the `.venv` directory.
+
+### 3. Activate Virtual Environment
+
+```bash
+source .venv/bin/activate
+```
+
+Your terminal prompt should now show `(.venv)` at the beginning.
+
+### 4. Install Required Packages
+
+Install the packages in the following order:
+
+```bash
+uv pip install ipykernel
+```
+
+```bash
+uv pip install jupyter
+```
+
+```bash
+uv pip install hyperview
+```
+
+**Note:** Do not use commas between package names when installing multiple packages at once. Use spaces instead or install packages separately.
+
+## Verify Installation
+
+After installation, you should have:
+
+- Jupyter notebook support (68+ packages)
+- IPython kernel for running notebooks (30+ packages)
+- Hyperview and dependencies (59+ packages)
+
+## Running the Notebook
+
+1. Open the notebook file in VSCode
+2. Select the Python interpreter from `.venv` when prompted
+3. Run the notebook cells
+
+## Troubleshooting
+
+### Virtual Environment Not Activating
+
+If you see "no such file or directory" when activating, check the path:
+
+```bash
+source .venv/bin/activate
+```
+
+### Package Installation Errors
+
+If you get parsing errors during installation, avoid using commas in the package list. Install packages separately or use spaces to separate package names.
diff --git a/notebooks/colab_smoke_test.ipynb b/notebooks/colab_smoke_test.ipynb
new file mode 100644
index 0000000..0cea259
--- /dev/null
+++ b/notebooks/colab_smoke_test.ipynb
@@ -0,0 +1,115 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "47ee06e6",
+ "metadata": {},
+ "source": [
+ "# HyperView — Colab Smoke Test\n",
+ "\n",
+ "This notebook verifies that HyperView can launch inside Google Colab and open the UI in a new browser tab.\n",
+ "\n",
+ "The smoke test downloads a small set of public-domain NASA images and computes real embeddings using a lightweight model."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "945a1017",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%pip install hyperview"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "856febf3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import hyperview as hv"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "58b82256",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import json\n",
+ "import random\n",
+ "import urllib.parse\n",
+ "import urllib.request\n",
+ "from pathlib import Path\n",
+ "\n",
+ "# Download a small set of NASA space images (public domain)\n",
+ "NUM_IMAGES = 24\n",
+ "CACHE_DIR = Path(\"/tmp/_nasa_smoke_images\")\n",
+ "NASA_QUERIES = [\"black hole\", \"nebula\", \"galaxy\", \"jwst\", \"hubble\"]\n",
+ "\n",
+ "CACHE_DIR.mkdir(parents=True, exist_ok=True)\n",
+ "image_files = [\n",
+ " p for p in CACHE_DIR.iterdir()\n",
+ " if p.is_file() and p.suffix.lower() in {\".jpg\", \".jpeg\", \".png\", \".webp\"}\n",
+ "]\n",
+ "if len(image_files) < NUM_IMAGES:\n",
+ " rng = random.Random(42)\n",
+ " query = rng.choice(NASA_QUERIES)\n",
+ " params = {\"q\": query, \"media_type\": \"image\", \"page\": \"1\"}\n",
+ " api_url = f\"https://images-api.nasa.gov/search?{urllib.parse.urlencode(params)}\"\n",
+ " items = json.load(urllib.request.urlopen(api_url))[\"collection\"][\"items\"]\n",
+ " rng.shuffle(items)\n",
+ "\n",
+ " for i, item in enumerate(items[:NUM_IMAGES]):\n",
+ " href = item[\"links\"][0][\"href\"]\n",
+ " ext = Path(urllib.parse.urlparse(href).path).suffix or \".jpg\"\n",
+ " urllib.request.urlretrieve(href, CACHE_DIR / f\"nasa_{i:03d}{ext}\")\n",
+ "\n",
+ "# Create dataset and add images\n",
+ "dataset = hv.Dataset(\"colab_smoke\", persist=False)\n",
+ "added, skipped = dataset.add_images_dir(str(CACHE_DIR), label_from_folder=False)\n",
+ "print(f\"✓ Loaded {added} NASA images\" + (f\" ({skipped} already present)\" if skipped else \"\"))\n",
+ "\n",
+ "# Compute embeddings with a lightweight model\n",
+ "MODEL = \"google/siglip-base-patch16-224\"\n",
+ "dataset.compute_embeddings(model=MODEL, show_progress=True)\n",
+ "dataset.compute_visualization()\n",
+ "\n",
+ "# Launch HyperView\n",
+ "hv.launch(dataset, port=6262)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b1cb3148",
+ "metadata": {},
+ "source": [
+ "Click the link printed above to open HyperView in a new tab. In Colab, you may need to copy/paste the URL."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.8"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/hyperview_clip_umap.py b/notebooks/hyperview_clip_umap.py
new file mode 100644
index 0000000..e6b5c8d
--- /dev/null
+++ b/notebooks/hyperview_clip_umap.py
@@ -0,0 +1,75 @@
+# %% [markdown]
+# # HyperView Demo: Image Embeddings + UMAP Visualization
+#
+# Demonstrates computing image embeddings and visualizing them in 2D using UMAP
+# with the HyperView Dataset API. Supports CLIP and SigLip models.
+
+# %%
+import json
+import random
+import urllib.parse
+import urllib.request
+from pathlib import Path
+
+import hyperview as hv
+
+# %% [markdown]
+# ## Download Random Space Images (NASA, Public Domain)
+#
+# This demo downloads a random set of space images from NASA's Images and Video Library
+# (https://images.nasa.gov). NASA imagery is generally public domain (US Government work).
+
+# %%
+NUM_IMAGES = 48
+CACHE_DIR = Path(__file__).parent / "_nasa_space_images"
+RANDOM_SEED = None # Set to an int (e.g. 0) for reproducible sampling
+NASA_QUERIES = ["black hole", "nebula", "galaxy", "jwst", "hubble"]
+
+
+CACHE_DIR.mkdir(parents=True, exist_ok=True)
+image_files = [
+ p
+ for p in CACHE_DIR.iterdir()
+ if p.is_file() and p.suffix.lower() in {".jpg", ".jpeg", ".png", ".webp", ".tif", ".tiff"}
+]
+if len(image_files) < NUM_IMAGES:
+ rng = random.Random(RANDOM_SEED)
+ query = rng.choice(NASA_QUERIES)
+ params = {"q": query, "media_type": "image", "page": "1"}
+ api_url = f"https://images-api.nasa.gov/search?{urllib.parse.urlencode(params)}"
+ items = json.load(urllib.request.urlopen(api_url))["collection"]["items"]
+ rng.shuffle(items)
+
+ for i, item in enumerate(items[:NUM_IMAGES]):
+ href = item["links"][0]["href"]
+ ext = Path(urllib.parse.urlparse(href).path).suffix or ".jpg"
+ urllib.request.urlretrieve(href, CACHE_DIR / f"nasa_{i:03d}{ext}")
+
+dataset = hv.Dataset("nasa_space_demo", persist=False)
+added, skipped = dataset.add_images_dir(str(CACHE_DIR), label_from_folder=False)
+print(f"✓ Loaded {added} NASA space images" + (f" ({skipped} already present)" if skipped else ""))
+
+# %% [markdown]
+# ## Compute Embeddings & UMAP Projection
+#
+# EmbedAnything supports many HuggingFace vision models, but not all.
+# For example, `microsoft/resnet-18` is currently rejected by EmbedAnything
+# ("Model not supported"). Common choices that work:
+# - CLIP: openai/clip-vit-base-patch32, openai/clip-vit-base-patch16, etc.
+# - SigLip: google/siglip-base-patch16-224, google/siglip-large-patch16-384
+# - Jina CLIP: jinaai/jina-clip-v2
+
+# %%
+# Use any HuggingFace model supported by EmbedAnything
+MODEL = "google/siglip-base-patch16-224"
+
+dataset.compute_embeddings(model=MODEL, show_progress=True)
+dataset.compute_visualization()
+
+# %% [markdown]
+# ## Launch HyperView
+
+# %%
+hv.launch(dataset, port=6262, open_browser=True)
+
+
diff --git a/notebooks/umap_tutorial.ipynb b/notebooks/umap_tutorial.ipynb
new file mode 100644
index 0000000..39b0f75
--- /dev/null
+++ b/notebooks/umap_tutorial.ipynb
@@ -0,0 +1,779 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "47ee06e6",
+ "metadata": {
+ "id": "47ee06e6"
+ },
+ "source": [
+ "# HyperView: CLIP vs HyCoCLIP with 2D UMAP Projection on CIFAR-100\n",
+ "\n",
+ "This notebook compares two embedding spaces on the same image set.\n",
+ "\n",
+ "- [**CLIP**](https://github.com/openai/CLIP) maps images and text into one shared vector space.\n",
+ " \n",
+ "- [**HyCoCLIP**](https://github.com/PalAvik/hycoclip) maps images into a space designed for hierarchy and tree-like structure.\n",
+ " \n",
+ "- We then compute a **2D projection** of each embedding space with [UMAP](https://umap-learn.readthedocs.io/en/latest/) so we can inspect clusters and label structure.\n",
+ " \n",
+ "\n",
+ "HyperView allows us to compare the embedding spaces of these different geometries.\n",
+ "\n",
+ "In this demo, we use the [CIFAR-100](https://huggingface.co/datasets/uoft-cs/cifar100) dataset of tiny images, available through HuggingFace."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Install\n",
+ "\n",
+ "HyperView is a library for dataset curation and model analysis. It provides tools for interactive visualization of the embedding space."
+ ],
+ "metadata": {
+ "id": "hyaagNXkyUBN"
+ },
+ "id": "hyaagNXkyUBN"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "945a1017",
+ "metadata": {
+ "id": "945a1017"
+ },
+ "outputs": [],
+ "source": [
+ "%%capture\n",
+ "# If you run this in Google Colab, install HyperView with this command.\n",
+ "!uv pip install hyperview"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Import HyperView"
+ ],
+ "metadata": {
+ "id": "hYtO76VBy7C9"
+ },
+ "id": "hYtO76VBy7C9"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "\n",
+ "We import HyperView and use it as the main interface for:\n",
+ "\n",
+ "- reading data\n",
+ " \n",
+ "- computing embeddings with pretrained models\n",
+ " \n",
+ "- computing a 2D projection for visualization\n",
+ " \n",
+ "- launching the interactive viewer\n",
+ " "
+ ],
+ "metadata": {
+ "id": "v-TvwC2Qyw42"
+ },
+ "id": "v-TvwC2Qyw42"
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "58b82256",
+ "metadata": {
+ "id": "58b82256"
+ },
+ "outputs": [],
+ "source": [
+ "import hyperview as hv"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Configuration"
+ ],
+ "metadata": {
+ "id": "nH4CjViEy-U4"
+ },
+ "id": "nH4CjViEy-U4"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "We specify the dataset and model settings in a single dictionary.\n",
+ "\n",
+ "### Data source\n",
+ "\n",
+ "- We load **CIFAR-100** from Hugging Face.\n",
+ " \n",
+ "- We use the **test** split.\n",
+ " \n",
+ "- `img` is the image field.\n",
+ " \n",
+ "- We use `coarse_label` for labels."
+ ],
+ "metadata": {
+ "id": "36tl8PxjzBT6"
+ },
+ "id": "36tl8PxjzBT6"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "Why coarse labels:\n",
+ "\n",
+ "- HyperView disables distinct label coloring when there are more than 20 labels.\n",
+ " \n",
+ "- CIFAR-100 has 100 fine labels, but 20 coarse labels.\n",
+ " \n",
+ "- Using coarse labels keeps label coloring useful in the viewer.\n",
+ " "
+ ],
+ "metadata": {
+ "id": "SIR8CsvfzW3I"
+ },
+ "id": "SIR8CsvfzW3I"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "### Sampling\n",
+ "\n",
+ "- `NUM_SAMPLES = 200` keeps embedding and layout computation fast enough for a demo.\n",
+ " \n",
+ "- Increase this if you want denser clusters, but expect more compute time.\n",
+ " "
+ ],
+ "metadata": {
+ "id": "tXpwwCXozezd"
+ },
+ "id": "tXpwwCXozezd"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "### Models\n",
+ "\n",
+ "- `openai/clip-vit-base-patch32` is a common CLIP baseline.\n",
+ " \n",
+ "- `hycoclip-vit-s` is a HyCoCLIP model that targets hierarchical structure."
+ ],
+ "metadata": {
+ "id": "NCtE2BN0zk3G"
+ },
+ "id": "NCtE2BN0zk3G"
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "DATASET_NAME = \"cifar100_coarse_clip_hyper_models\"\n",
+ "HF_DATASET = \"uoft-cs/cifar100\"\n",
+ "HF_SPLIT = \"test\"\n",
+ "HF_IMAGE_KEY = \"img\"\n",
+ "# NOTE: HyperView disables distinct label coloring when there are >20 labels.\n",
+ "# CIFAR-100 has 100 fine labels, but only 20 coarse labels.\n",
+ "HF_LABEL_KEY = \"coarse_label\"\n",
+ "NUM_SAMPLES = 200\n",
+ "CLIP_MODEL_ID = \"openai/clip-vit-base-patch32\"\n",
+ "HYPER_MODELS_MODEL_ID = \"hycoclip-vit-s\""
+ ],
+ "metadata": {
+ "id": "Wto-RfOFzCdJ"
+ },
+ "id": "Wto-RfOFzCdJ",
+ "execution_count": 6,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Load CIFAR-100 into a HyperView dataset\n",
+ "\n",
+ "We create a HyperView `Dataset` object and then import samples from Hugging Face.\n",
+ "\n",
+ "Key parameters:\n",
+ "\n",
+ "- `persist=False` keeps this dataset in memory for this run.\n",
+ " \n",
+ "- `max_samples=NUM_SAMPLES` subsamples the split.\n",
+ " \n",
+ "\n",
+ "At the end, `len(dataset)` is the number of loaded examples."
+ ],
+ "metadata": {
+ "id": "nEdV-nZTzs3G"
+ },
+ "id": "nEdV-nZTzs3G"
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "print(\"Loading CIFAR-100 from Hugging Face...\")\n",
+ "dataset = hv.Dataset(DATASET_NAME, persist=False)\n",
+ "dataset.add_from_huggingface(\n",
+ " HF_DATASET,\n",
+ " split=HF_SPLIT,\n",
+ " image_key=HF_IMAGE_KEY,\n",
+ " label_key=HF_LABEL_KEY,\n",
+ " max_samples=NUM_SAMPLES,\n",
+ ")\n",
+ "print(f\"Loaded {len(dataset)} samples\")\n",
+ "\n"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "Vb0PAH7gvsP7",
+ "outputId": "953a9176-fa36-4874-9263-838df3bf0180"
+ },
+ "id": "Vb0PAH7gvsP7",
+ "execution_count": 7,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Loading CIFAR-100 from Hugging Face...\n",
+ "Loading 200 samples from uoft-cs/cifar100...\n",
+ "Images saved to: /root/.hyperview/media/huggingface/uoft-cs_cifar100/test\n",
+ "Loaded 200 samples\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Compute CLIP embeddings\n",
+ "\n",
+ "### What CLIP embeddings represent\n",
+ "\n",
+ "CLIP trains an image encoder and a text encoder so that matching image-text pairs have nearby vectors. When we compute image embeddings here, each image becomes a single vector in a shared space that also supports text vectors.\n",
+ "\n",
+ "### What we do in code\n",
+ "\n",
+ "- `compute_embeddings(CLIP_MODEL_ID)` runs the CLIP image encoder and stores one vector per image.\n",
+ " \n",
+ "- The returned `space_key` is a handle to that embedding space inside HyperView.\n",
+ " \n",
+ "\n",
+ "### Why we compute a 2D visualization\n",
+ "\n",
+ "High dimensional vectors are hard to inspect. We compute a 2D layout so we can see neighborhood structure.\n",
+ "\n",
+ "- `compute_visualization(..., geometry=\"euclidean\")` treats the embedding space as Euclidean.\n",
+ " \n",
+ "- This matches how CLIP vectors are often used with cosine similarity or dot product in a flat vector space.\n",
+ " \n",
+ "\n",
+ "Note on “UMAP” \n",
+ "This notebook focuses on a 2D projection step. HyperView computes a 2D layout for the viewer. The title calls this UMAP. In practice, you should treat this as a nonlinear projection that preserves local neighborhoods better than PCA."
+ ],
+ "metadata": {
+ "id": "xvAMFY_Dz71Z"
+ },
+ "id": "xvAMFY_Dz71Z"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Compute HyCoCLIP embeddings\n",
+ "\n",
+ "### Why a different geometry\n",
+ "\n",
+ "Some datasets have hierarchical label structure. A tree structure is hard to represent in Euclidean space without distortion.\n",
+ "\n",
+ "Hyperbolic spaces can represent tree growth patterns with less distortion than Euclidean spaces. A common model is the **Poincaré ball**, which is a way to work with hyperbolic geometry in a bounded region.\n",
+ "\n",
+ "### What we do in code\n",
+ "\n",
+ "- `compute_embeddings(model=HYPER_MODELS_MODEL_ID)` computes embeddings from the HyCoCLIP model.\n",
+ " \n",
+ "- `compute_visualization(..., geometry=\"poincare\")` tells HyperView to treat distances and neighborhoods using Poincaré geometry.\n",
+ " \n",
+ "\n",
+ "How to read the plot\n",
+ "\n",
+ "- In Poincaré style layouts, points near the center and points near the boundary can have different distance behavior than Euclidean layouts.\n",
+ " \n",
+ "- Focus on neighborhood membership and cluster separation rather than raw coordinate scale."
+ ],
+ "metadata": {
+ "id": "zEkSKKPN0GZz"
+ },
+ "id": "zEkSKKPN0GZz"
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "clip_space = dataset.compute_embeddings(CLIP_MODEL_ID)\n",
+ "clip_space"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 53
+ },
+ "id": "rXl9yJKyx3hQ",
+ "outputId": "472d1e1e-bde6-4b4c-cfe5-5485c7ca18bd"
+ },
+ "id": "rXl9yJKyx3hQ",
+ "execution_count": 14,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "All 200 samples already have embeddings in space 'embed-anything__openai_clip-vit-base-patch32__4771034973d8'\n"
+ ]
+ },
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "'embed-anything__openai_clip-vit-base-patch32__4771034973d8'"
+ ],
+ "application/vnd.google.colaboratory.intrinsic+json": {
+ "type": "string"
+ }
+ },
+ "metadata": {},
+ "execution_count": 14
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "hyperbolic_clip_space = dataset.compute_embeddings(model=HYPER_MODELS_MODEL_ID)\n",
+ "hyperbolic_clip_space"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 53
+ },
+ "id": "bxivI8P79IiK",
+ "outputId": "16c01abb-848e-4915-c88b-4972a532c7eb"
+ },
+ "id": "bxivI8P79IiK",
+ "execution_count": 10,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Computing embeddings for 200 samples...\n"
+ ]
+ },
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "'hyper-models__hycoclip-vit-s__b63e9ee38a30'"
+ ],
+ "application/vnd.google.colaboratory.intrinsic+json": {
+ "type": "string"
+ }
+ },
+ "metadata": {},
+ "execution_count": 10
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## UMAP Parameters: `n_neighbors` and `min_dist`\n",
+ "\n",
+ "UMAP creates 2D visualizations through a two-phase process:\n",
+ "\n",
+ "1. **Local structure learning** (controlled by `n_neighbors`)\n",
+ "2. **Layout optimization** (controlled by `min_dist`)\n",
+ "\n",
+ "These parameters have different effects depending on whether you're working with Euclidean or hyperbolic embeddings.\n",
+ "\n",
+ "\n",
+ "## `n_neighbors`: Balancing Local vs Global Structure\n",
+ "\n",
+ "The `n_neighbors` parameter determines how many nearby points UMAP considers when learning the manifold structure.\n",
+ "\n",
+ "**Small values (5-15):**\n",
+ "- UMAP focuses on immediate neighbors\n",
+ "- Preserves fine-grained local structure\n",
+ "- Creates tight, distinct clusters\n",
+ "- May fragment large-scale patterns\n",
+ "- Better for finding small subgroups\n",
+ "\n",
+ "**Large values (30-100):**\n",
+ "- UMAP considers broader neighborhoods\n",
+ "- Captures global dataset organization\n",
+ "- Creates more connected layouts\n",
+ "- May blur boundaries between clusters\n",
+ "- Better for understanding overall structure\n",
+ "\n",
+ "**Technical detail:** UMAP constructs a k-nearest neighbor graph using this parameter. The graph encodes which points should remain close in the 2D projection.\n",
+ "\n",
+ "**Default: 15**, which is a middle ground that works for most datasets\n",
+ "\n",
+ "## `min_dist`: Controlling Point Spacing\n",
+ "\n",
+ "The `min_dist` parameter sets the minimum allowed distance between points in the final 2D layout. This is the distance that UMAP forces between **all points** in the 2D layout. When `min-dist` is large points that are in the samle cluster are pushed apart from each other. The visual effect is that we see more empty space everywhere in the plot.\n",
+ "\n",
+ "**Small values (0.0-0.1):**\n",
+ "- Points can be placed very close together\n",
+ "- Clusters appear dense and compact\n",
+ "- Easier to see cluster boundaries\n",
+ "- Can create overlapping point clouds, making them difficult to select with HyperView's lasso tool\n",
+ "- Good for large datasets where you want to see density\n",
+ "\n",
+ "**Large values (0.3-0.99):**\n",
+ "- Forces points to spread out\n",
+ "- Individual points are more visible and the layout appears less crowded\n",
+ "- May exaggerate distances between similar items\n",
+ "- Good for small datasets or when you need to see each point\n",
+ "\n",
+ "**Technical detail:** This parameter affects the \"attractive force\" in UMAP's layout.\n",
+ "\n",
+ "**Default: 0.1** (high density some clumping of embedding projections)"
+ ],
+ "metadata": {
+ "id": "PG3rQ6lNAoyV"
+ },
+ "id": "PG3rQ6lNAoyV"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Parameter Interaction in UMAP\n",
+ "\n",
+ "The two parameters work together to shape your visualization:\n",
+ "\n",
+ "**n_neighbors + min_dist together:**\n",
+ "\n",
+ "| n_neighbors | min_dist | Result |\n",
+ "|------------|----------|--------|\n",
+ "| Small (5) | Small (0.01) | Many tight, separated micro-clusters |\n",
+ "| Small (5) | Large (0.5) | Fragmented layout with forced spacing |\n",
+ "| Large (50) | Small (0.01) | Dense, continuous manifold structure |\n",
+ "| Large (50) | Large (0.5) | Smooth, evenly distributed layout |\n",
+ "\n",
+ "**Practical example:**\n",
+ "```python\n",
+ "# For finding fine subgroups in cell types\n",
+ "compute_visualization(n_neighbors=10, min_dist=0.05)\n",
+ "\n",
+ "# For understanding broad relationships in a corpus\n",
+ "compute_visualization(n_neighbors=50, min_dist=0.3)"
+ ],
+ "metadata": {
+ "id": "zV_EJELHBkKt"
+ },
+ "id": "zV_EJELHBkKt"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Parameter Interaction in UMAP\n",
+ "\n",
+ "The two parameters work together to shape your visualization:\n",
+ "\n",
+ "**n_neighbors + min_dist together:**\n",
+ "\n",
+ "| n_neighbors | min_dist | Result |\n",
+ "|------------|----------|--------|\n",
+ "| Small (5) | Small (0.01) | Many tight, separated and small clusters |\n",
+ "| Small (5) | Large (0.5) | Fragmented layout with forced spacing |\n",
+ "| Large (50) | Small (0.01) | Dense, continuous structure |\n",
+ "| Large (50) | Large (0.5) | Smooth, evenly distributed layout |\n",
+ "\n",
+ "**Snippets:**\n",
+ "```python\n",
+ "# For finding fine subgroups in cell types\n",
+ "compute_visualization(n_neighbors=10, min_dist=0.05)\n",
+ "\n",
+ "# For understanding broad relationships in a corpus\n",
+ "compute_visualization(n_neighbors=50, min_dist=0.3)\n",
+ "\n",
+ "\n",
+ "\n"
+ ],
+ "metadata": {
+ "id": "p4-fczUnDfds"
+ },
+ "id": "p4-fczUnDfds"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "### Euclidean UMAP (Standard Embeddings)\n",
+ "\n",
+ "```markdown\n",
+ "## UMAP on Euclidean Embeddings\n",
+ "\n",
+ "For standard models (CLIP, ResNet, etc.), embeddings live in flat Euclidean space.\n",
+ "\n",
+ "**What happens:**\n",
+ "1. UMAP measures distances using the specified metric (default: cosine)\n",
+ "2. Constructs a neighbor graph in the high-dimensional space\n",
+ "3. Optimizes a 2D layout in flat Euclidean space\n",
+ "4. Output coordinates are (x, y) pairs with no geometric constraints (can be as far away from each other as originally computed)\n",
+ "\n",
+ "**Metric choice matters:**\n",
+ "- `metric=\"cosine\"`: Best for normalized embeddings (CLIP, sentence transformers)\n",
+ "- `metric=\"euclidean\"`: Best for embeddings where magnitude matters\n",
+ "- `metric=\"manhattan\"`: Sometimes better for sparse or categorical data"
+ ],
+ "metadata": {
+ "id": "zXoChx71EfDP"
+ },
+ "id": "zXoChx71EfDP"
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "dataset.compute_visualization(space_key=clip_space, # Which embedding space to project\n",
+ " method=\"umap\", # Projection method (only 'umap' supported)\n",
+ " geometry=\"euclidean\", # Output geometry: 'euclidean' or 'poincare'\n",
+ " n_neighbors=15, # UMAP: Number of neighbors (default: 15)\n",
+ " min_dist=0.1, # UMAP: Minimum distance (default: 0.1)\n",
+ " metric=\"cosine\", # UMAP: Distance metric (default: 'cosine')\n",
+ " force=True) # Force recomputation if layout exists\n"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 53
+ },
+ "id": "Fd50hTlc9K31",
+ "outputId": "7272fbef-0a2e-4d2c-8178-9b33764a20c2"
+ },
+ "id": "Fd50hTlc9K31",
+ "execution_count": 9,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Computing euclidean umap layout for 200 samples...\n"
+ ]
+ },
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "'embed-anything__openai_clip-vit-base-patch32__4771034973d8__euclidean_umap_92b543de'"
+ ],
+ "application/vnd.google.colaboratory.intrinsic+json": {
+ "type": "string"
+ }
+ },
+ "metadata": {},
+ "execution_count": 9
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "### Cell 6: Hyperbolic UMAP (HyCoCLIP and Hyperboloid Embeddings)\n",
+ "\n",
+ "```markdown\n",
+ "## UMAP on Hyperbolic Embeddings\n",
+ "\n",
+ "For hyperbolic models (HyCoCLIP), embeddings live in curved hyperbolic space (hyperboloid model).\n",
+ "\n",
+ "**What happens:**\n",
+ "1. HyperView converts hyperboloid coordinates to the Poincaré ball (unit disk)\n",
+ "2. UMAP measures distances using Poincaré distance (hyperbolic geometry)\n",
+ "3. Optimizes a 2D layout using hyperbolic distance in the output space\n",
+ "4. Output coordinates are (x, y) pairs **constrained to the unit disk**\n",
+ "\n",
+ "**Key differences from Euclidean:**\n",
+ "- Distance metric is **automatically** \"poincare\" (you cannot override this)\n",
+ "- Points near the disk center have more \"room\" (hyperbolic space expands near edges)\n",
+ "- Same Euclidean distance means different hyperbolic distances at different radii\n",
+ "- Natural hierarchy: broader or more ambiguous concepts toward center, specific items toward edges. Mislabeled examples also appear near the center as they are hard to separate.\n",
+ "\n",
+ "**Parameter effects:**\n",
+ "- `n_neighbors`: Works the same as Euclidean (controls local vs global)\n",
+ "- `min_dist`: Interprets distances **in hyperbolic geometry**\n",
+ " - Same numeric value creates different visual spacing than in Euclidean\n",
+ " - Points near the boundary can appear closer in visual distance but farther in hyperbolic distance\n",
+ "\n",
+ "**Example:**\n",
+ "```python\n",
+ "# Hyperbolic CLIP embeddings\n",
+ "dataset.compute_embeddings(model=\"hycoclip-vit-s\") # Creates hyperboloid space\n",
+ "dataset.compute_visualization(\n",
+ " geometry=\"poincare\", # Output to Poincaré disk\n",
+ " # metric is automatically \"poincare\" (ignores what you pass)\n",
+ " n_neighbors=15,\n",
+ " min_dist=0.1\n",
+ ")"
+ ],
+ "metadata": {
+ "id": "etnAtM19EyUH"
+ },
+ "id": "etnAtM19EyUH"
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "dataset.compute_visualization(space_key=hyperbolic_clip_space,\n",
+ " method=\"umap\",\n",
+ " geometry=\"poincare\", # Use Poincare ball method to project hyperbolic embedding to 2D plane\n",
+ " # metric is overriden to \"poincare\", see https://github.com/Hyper3Labs/HyperView/blob/main/src/hyperview/embeddings/projection.py#L102\n",
+ " n_neighbors=15,\n",
+ " min_dist=0.1,\n",
+ " )"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 53
+ },
+ "id": "tXWs_FXQ9OX2",
+ "outputId": "156e937b-8d5c-4321-872b-bce25de69648"
+ },
+ "id": "tXWs_FXQ9OX2",
+ "execution_count": 15,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Computing poincare umap layout for 200 samples...\n"
+ ]
+ },
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "'hyper-models__hycoclip-vit-s__b63e9ee38a30__poincare_umap_92b543de'"
+ ],
+ "application/vnd.google.colaboratory.intrinsic+json": {
+ "type": "string"
+ }
+ },
+ "metadata": {},
+ "execution_count": 15
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Launch the interactive app in HyperView\n",
+ "\n",
+ "`hv.launch(dataset, open_browser=True)` starts a local server and opens the viewer.\n",
+ "\n",
+ "In the UI, you should be able to:\n",
+ "\n",
+ "- switch between embedding spaces (CLIP vs HyCoCLIP)\n",
+ " \n",
+ "- inspect image thumbnails for selected points\n",
+ " \n",
+ "- compare how coarse labels cluster under each geometry\n",
+ " \n"
+ ],
+ "metadata": {
+ "id": "rGEUqA0W0O4g"
+ },
+ "id": "rGEUqA0W0O4g"
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "hv.launch(dataset, open_browser=True)"
+ ],
+ "metadata": {
+ "id": "o3TdCwD2wIf2"
+ },
+ "id": "o3TdCwD2wIf2",
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Euclidean vs Poincaré: Visual Differences\n",
+ "\n",
+ "**Euclidean geometry (`geometry=\"euclidean\"`):**\n",
+ "- Points spread across an unbounded 2D plane\n",
+ "- Distance is uniform everywhere (1 unit = 1 unit, regardless of location)\n",
+ "- Typical output: points in a square/rectangular region\n",
+ "- Best for: Datasets without strong hierarchical structure\n",
+ "\n",
+ "**Poincaré geometry (`geometry=\"poincare\"`):**\n",
+ "- Points constrained to a unit disk (circle with radius 1)\n",
+ "- Distance is **non-uniform**: more space near edges, compressed near center\n",
+ "- Typical output: hierarchical organization\n",
+ " - General/abstract concepts cluster near the center\n",
+ " - Specific/detailed items pushed toward the boundary\n",
+ "- Best for: Datasets with natural hierarchies (taxonomies, concept trees)\n",
+ "\n",
+ "**When to use Poincaré output:**\n",
+ "1. You have hyperbolic embeddings (HyCoCLIP)\n",
+ "2. Your data has hierarchical structure (animal taxonomy, document topics)\n",
+ "3. You want to visualize relationships at multiple scales simultaneously\n",
+ "\n",
+ "**When to use Euclidean output:**\n",
+ "1. You have standard embeddings (CLIP, ResNet)\n",
+ "2. Your data has flat or network structure\n",
+ "3. You want familiar, intuitive spatial relationships\n",
+ "\n"
+ ],
+ "metadata": {
+ "id": "FqwDwyE9Fdpw"
+ },
+ "id": "FqwDwyE9Fdpw"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Suggested checks\n",
+ "\n",
+ "- Are nearest neighbors under Euclidean CLIP similar to nearest neighbors under Poincaré HyCoCLIP?\n",
+ "\n",
+ "\n",
+ " "
+ ],
+ "metadata": {
+ "id": "FRjieuy10Yed"
+ },
+ "id": "FRjieuy10Yed"
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.8"
+ },
+ "colab": {
+ "provenance": [],
+ "machine_shape": "hm",
+ "gpuType": "T4"
+ },
+ "accelerator": "GPU"
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 8fb24bd..fc26f6c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,12 +1,12 @@
[project]
name = "hyperview"
-version = "0.1.0"
+dynamic = ["version"]
description = "Open-source dataset curation with hyperbolic embeddings visualization"
readme = "README.md"
license = { text = "MIT" }
requires-python = ">=3.10"
authors = [
- { name = "HyperView Team" }
+ { name = "hyper3labs" }
]
keywords = ["embeddings", "visualization", "hyperbolic", "dataset", "curation", "machine-learning"]
classifiers = [
@@ -23,43 +23,60 @@ classifiers = [
]
dependencies = [
- "fastapi>=0.115.0",
- "uvicorn[standard]>=0.32.0",
- "embed-anything>=0.3.0",
- "numpy>=1.26.0",
- "umap-learn>=0.5.6",
- "pillow>=10.0.0",
- "pydantic>=2.0.0",
- "aiofiles>=24.0.0",
- "datasets>=3.0.0",
+ "fastapi>=0.128.0",
+ "uvicorn[standard]>=0.40.0",
+ "embed-anything>=0.7.0",
+ "hyper-models>=0.1.0", # PyPI package: https://pypi.org/project/hyper-models/
+ "numpy>=1.26.4,<2.4",
+ "umap-learn>=0.5.11",
+ "pillow>=12.1.0",
+ "pydantic>=2.12.5",
+ "aiofiles>=25.1.0",
+ "datasets>=4.5.0",
+ "lancedb>=0.26.1",
+ "pyarrow>=22.0.0",
]
[project.optional-dependencies]
dev = [
- "pytest>=8.0.0",
- "pytest-asyncio>=0.24.0",
- "httpx>=0.27.0",
- "ruff>=0.7.0",
+ "pytest>=9.0.2",
+ "pytest-asyncio>=1.3.0",
+ "httpx>=0.28.1",
+ "ruff>=0.14.13",
]
-hyperbolic = [
- "torch>=2.0.0",
- "geoopt>=0.5.1",
+ml = [
+ "torch>=2.9.1",
+ "torchvision>=0.24.1",
+ "timm>=1.0.0",
]
[project.scripts]
hyperview = "hyperview.cli:main"
[project.urls]
-Homepage = "https://github.com/merantix/HyperView"
-Documentation = "https://github.com/merantix/HyperView#readme"
-Repository = "https://github.com/merantix/HyperView"
+Homepage = "https://github.com/Hyper3Labs/HyperView"
+Documentation = "https://github.com/Hyper3Labs/HyperView#readme"
+Repository = "https://github.com/Hyper3Labs/HyperView"
+Issues = "https://github.com/Hyper3Labs/HyperView/issues"
[build-system]
-requires = ["hatchling"]
+requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build"
+[tool.hatch.metadata]
+allow-direct-references = true
+
+[tool.hatch.version]
+source = "vcs"
+
+[tool.hatch.build.hooks.vcs]
+version-file = "src/hyperview/_version.py"
+
[tool.hatch.build.targets.wheel]
packages = ["src/hyperview"]
+# Include frontend static assets (pre-built before packaging)
+# artifacts is for untracked files that exist; force-include ensures they're packaged
+artifacts = ["src/hyperview/server/static/**"]
[tool.hatch.build.targets.sdist]
include = [
@@ -67,6 +84,8 @@ include = [
"/README.md",
"/LICENSE",
]
+# Include built frontend for reproducible source builds
+artifacts = ["src/hyperview/server/static/**"]
[tool.ruff]
line-length = 100
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index c747cac..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-torch>=2.9.1
-numpy>=2.3.5
-matplotlib>=3.10.7
-geoopt>=0.5.1
-geomstats>=2.8.0
-scikit-learn>=1.7.2
diff --git a/scripts/create_cookoff_fixture.py b/scripts/create_cookoff_fixture.py
new file mode 100644
index 0000000..815f9d2
--- /dev/null
+++ b/scripts/create_cookoff_fixture.py
@@ -0,0 +1,171 @@
+"""Create a lightweight Cosmos-Curate-like fixture for local UI development.
+
+This script builds a synthetic split output using existing MP4 clips so the
+frontend/backend can be developed without running full cosmos-curate pipelines.
+
+Output structure:
+ /
+ split_output/
+ clips/*.mp4
+ metas/v0/*.json
+ previews/*_0_20.webp
+ ce1_embd/*.pickle
+ summary.json
+ dedup_output/
+ kmeans_centroids.npy
+
+Usage:
+ uv run python scripts/create_cookoff_fixture.py \
+ --source-clips-path /path/to/mp4s \
+ --output-root /tmp/cookoff_fixture \
+ --num-clips 40
+"""
+
+from __future__ import annotations
+
+import argparse
+import json
+import pickle
+import random
+import shutil
+import uuid
+from pathlib import Path
+
+import numpy as np
+from PIL import Image, ImageDraw
+
+
+CAPTION_TEMPLATES = [
+ (
+ "The ego vehicle approaches an intersection with moderate traffic. "
+ "A lead vehicle slows down near a traffic signal while pedestrians remain on the sidewalk."
+ ),
+ (
+ "The car continues through an urban road segment with lane markers clearly visible. "
+ "Oncoming traffic is sparse and no sudden hazards are observed."
+ ),
+ (
+ "The vehicle moves past parked cars and approaches a crosswalk. "
+ "A cyclist appears on the right edge of the scene and the ego car reduces speed."
+ ),
+ (
+ "The camera captures a highway merge with multiple vehicles changing lanes. "
+ "The ego vehicle keeps steady distance and follows lane guidance."
+ ),
+]
+
+
+def _normalize_dir(path_str: str, *, create: bool = False) -> Path:
+ path = Path(path_str).expanduser().resolve()
+ if create:
+ path.mkdir(parents=True, exist_ok=True)
+ elif not path.exists():
+ raise FileNotFoundError(f"Path does not exist: {path}")
+ return path
+
+
+def _make_preview(path: Path, *, index: int, clip_id: str) -> None:
+ width, height = 640, 360
+ img = Image.new("RGB", (width, height), color=(20 + (index * 17) % 120, 40, 70 + (index * 11) % 120))
+ draw = ImageDraw.Draw(img)
+ draw.rectangle((16, 16, width - 16, height - 16), outline=(255, 255, 255), width=2)
+ draw.text((28, 28), f"Cookoff Fixture Clip {index + 1}", fill=(255, 255, 255))
+ draw.text((28, 60), clip_id[:18], fill=(220, 220, 220))
+ path.parent.mkdir(parents=True, exist_ok=True)
+ img.save(path, "WEBP", quality=92)
+
+
+def _make_caption(index: int) -> str:
+ base = CAPTION_TEMPLATES[index % len(CAPTION_TEMPLATES)]
+ return (
+ f"{base} The model reasons over traffic flow, relative motion, and road context. \n"
+ f"{base} "
+ )
+
+
+def main() -> None:
+ parser = argparse.ArgumentParser(description="Create synthetic Cosmos-Curate fixture")
+ parser.add_argument("--source-clips-path", required=True, help="Directory containing source MP4 clips")
+ parser.add_argument("--output-root", default="/tmp/cookoff_fixture", help="Fixture output root")
+ parser.add_argument("--num-clips", type=int, default=40, help="Number of clips to include")
+ parser.add_argument(
+ "--embedding-dim",
+ type=int,
+ default=256,
+ help="Embedding vector dimension (default matches cosmos-embed1-224p)",
+ )
+ parser.add_argument("--seed", type=int, default=42, help="Random seed")
+ args = parser.parse_args()
+
+ rng = np.random.default_rng(args.seed)
+ random.seed(args.seed)
+
+ source_dir = _normalize_dir(args.source_clips_path)
+ output_root = _normalize_dir(args.output_root, create=True)
+
+ split_output = _normalize_dir(str(output_root / "split_output"), create=True)
+ dedup_output = _normalize_dir(str(output_root / "dedup_output"), create=True)
+
+ clips_dir = _normalize_dir(str(split_output / "clips"), create=True)
+ metas_dir = _normalize_dir(str(split_output / "metas" / "v0"), create=True)
+ previews_dir = _normalize_dir(str(split_output / "previews"), create=True)
+ embd_dir = _normalize_dir(str(split_output / "ce1_embd"), create=True)
+
+ source_clips = sorted(source_dir.glob("*.mp4"))
+ if not source_clips:
+ raise RuntimeError(f"No .mp4 files found under {source_dir}")
+
+ selected = source_clips[: max(1, min(args.num_clips, len(source_clips)))]
+
+ for index, src_clip in enumerate(selected):
+ clip_id = str(uuid.uuid4())
+ dst_clip = clips_dir / f"{clip_id}.mp4"
+ shutil.copy2(src_clip, dst_clip)
+
+ preview_path = previews_dir / f"{clip_id}_0_20.webp"
+ _make_preview(preview_path, index=index, clip_id=clip_id)
+
+ caption = _make_caption(index)
+ meta = {
+ "clip_uuid": clip_id,
+ "source_video": str(src_clip),
+ "clip_location": str(dst_clip),
+ "span": [0.0, 2.0],
+ "dimensions": [1600, 900],
+ "framerate": 10.0,
+ "num_frames": 20,
+ "aesthetic_score": round(random.uniform(4.8, 9.6), 2),
+ "motion_score": round(random.uniform(3.1, 9.8), 2),
+ "captions": {"cosmos_r2": caption},
+ }
+ (metas_dir / f"{clip_id}.json").write_text(json.dumps(meta, indent=2), encoding="utf-8")
+
+ vec = rng.normal(loc=0.0, scale=1.0, size=(args.embedding_dim,)).astype(np.float32)
+ with (embd_dir / f"{clip_id}.pickle").open("wb") as f:
+ pickle.dump(vec, f)
+
+ summary = {
+ "total_videos_processed": len(selected),
+ "total_clips_generated": len(selected),
+ "pipeline_duration_seconds": round(2.5 + len(selected) * 0.05, 2),
+ "models_used": ["transnetv2", "cosmos_r2", "cosmos_embed1_224p"],
+ "embedding_algorithm": "cosmos-embed1-224p",
+ "total_num_clips_passed": len(selected),
+ "total_num_clips_with_caption": len(selected),
+ "total_num_clips_with_embeddings": len(selected),
+ "num_input_videos": len(selected),
+ "num_processed_videos": len(selected),
+ "pipeline_run_time": round(0.2 + len(selected) * 0.02, 2),
+ "total_video_duration": float(len(selected) * 2),
+ "total_clip_duration": float(len(selected) * 2),
+ }
+ (split_output / "summary.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")
+
+ np.save(dedup_output / "kmeans_centroids.npy", rng.normal(size=(4, args.embedding_dim)).astype(np.float32))
+
+ print(f"Created fixture at: {output_root}")
+ print(f"Clips: {len(selected)}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/curate_submission_metrics.py b/scripts/curate_submission_metrics.py
new file mode 100644
index 0000000..c87a6a3
--- /dev/null
+++ b/scripts/curate_submission_metrics.py
@@ -0,0 +1,262 @@
+"""Compute submission KPIs from Cosmos-Curate outputs.
+
+This script aggregates split + dedup artifacts into metrics used in the hackathon
+submission and demo narrative.
+"""
+
+from __future__ import annotations
+
+import argparse
+import csv
+import json
+from pathlib import Path
+from typing import Any
+
+import pyarrow.parquet as pq
+
+
+def _normalize_path(path_str: str) -> Path:
+ path = Path(path_str).expanduser().resolve()
+ if not path.exists():
+ raise FileNotFoundError(f"Path does not exist: {path}")
+ return path
+
+
+def _read_json(path: Path) -> dict[str, Any]:
+ with path.open("r", encoding="utf-8") as f:
+ return json.load(f)
+
+
+def _dedup_summary_path(dedup_output_path: Path, dedup_eps: float) -> Path:
+ tag = f"{dedup_eps:.6g}".rstrip("0").rstrip(".")
+ return dedup_output_path / "extraction" / f"dedup_summary_{tag}.csv"
+
+
+def _safe_div(numerator: float, denominator: float) -> float:
+ if denominator <= 0:
+ return 0.0
+ return numerator / denominator
+
+
+def _load_split_metrics(split_output_path: Path) -> dict[str, Any]:
+ summary_path = split_output_path / "summary.json"
+ if not summary_path.exists():
+ raise FileNotFoundError(f"Missing split summary file: {summary_path}")
+
+ summary = _read_json(summary_path)
+ clips_passed = int(summary.get("total_num_clips_passed", 0))
+ clips_with_caption = int(summary.get("total_num_clips_with_caption", 0))
+ clips_with_embeddings = int(summary.get("total_num_clips_with_embeddings", 0))
+ clips_transcoded = int(summary.get("total_num_clips_transcoded", 0))
+
+ return {
+ "embedding_algorithm": summary.get("embedding_algorithm"),
+ "num_input_videos": int(summary.get("num_input_videos", 0)),
+ "num_processed_videos": int(summary.get("num_processed_videos", 0)),
+ "pipeline_run_time_min": float(summary.get("pipeline_run_time", 0.0)),
+ "total_video_duration_sec": float(summary.get("total_video_duration", 0.0)),
+ "total_clip_duration_sec": float(summary.get("total_clip_duration", 0.0)),
+ "clips_passed": clips_passed,
+ "clips_transcoded": clips_transcoded,
+ "clips_with_caption": clips_with_caption,
+ "clips_with_embeddings": clips_with_embeddings,
+ "caption_coverage_pct": _safe_div(clips_with_caption, clips_passed) * 100.0,
+ "embedding_coverage_pct": _safe_div(clips_with_embeddings, clips_passed) * 100.0,
+ }
+
+
+def _load_dedup_metrics(dedup_output_path: Path, dedup_eps: float) -> dict[str, Any]:
+ summary_path = _dedup_summary_path(dedup_output_path, dedup_eps)
+
+ if summary_path.exists():
+ with summary_path.open("r", encoding="utf-8") as f:
+ reader = csv.DictReader(f)
+ rows = list(reader)
+ if rows:
+ row = rows[0]
+ kept = int(row.get("kept", 0))
+ removed = int(row.get("removed", 0))
+ total = int(row.get("total", 0))
+ reduction_pct = _safe_div(removed, total) * 100.0
+ return {
+ "eps": float(row.get("eps", dedup_eps)),
+ "kept": kept,
+ "removed": removed,
+ "total": total,
+ "reduction_pct": reduction_pct,
+ }
+
+ threshold = 1.0 - dedup_eps
+ pruning_dir = dedup_output_path / "extraction" / "semdedup_pruning_tables"
+ if not pruning_dir.exists():
+ return {
+ "eps": dedup_eps,
+ "kept": 0,
+ "removed": 0,
+ "total": 0,
+ "reduction_pct": 0.0,
+ }
+
+ kept = 0
+ total = 0
+ for parquet_path in sorted(pruning_dir.glob("cluster_*.parquet")):
+ table = pq.read_table(parquet_path, columns=["cosine_sim_score"])
+ scores = table.column("cosine_sim_score").to_pylist()
+ for score in scores:
+ if score is None:
+ continue
+ total += 1
+ if float(score) <= threshold:
+ kept += 1
+
+ removed = max(total - kept, 0)
+ return {
+ "eps": dedup_eps,
+ "kept": kept,
+ "removed": removed,
+ "total": total,
+ "reduction_pct": _safe_div(removed, total) * 100.0,
+ }
+
+
+def _top_duplicate_pairs(
+ dedup_output_path: Path,
+ *,
+ dedup_eps: float,
+ top_k: int,
+) -> list[dict[str, Any]]:
+ pruning_dir = dedup_output_path / "extraction" / "semdedup_pruning_tables"
+ if not pruning_dir.exists() or top_k <= 0:
+ return []
+
+ threshold = 1.0 - dedup_eps
+ rows: list[dict[str, Any]] = []
+
+ for parquet_path in sorted(pruning_dir.glob("cluster_*.parquet")):
+ table = pq.read_table(parquet_path, columns=["id", "max_id", "cosine_sim_score"])
+ ids = table.column("id").to_pylist()
+ max_ids = table.column("max_id").to_pylist()
+ scores = table.column("cosine_sim_score").to_pylist()
+
+ for clip_id, max_id, score in zip(ids, max_ids, scores):
+ if clip_id is None or max_id is None or score is None:
+ continue
+ score_f = float(score)
+ if score_f <= threshold:
+ continue
+ rows.append(
+ {
+ "id": str(clip_id),
+ "max_id": str(max_id),
+ "cosine_sim_score": score_f,
+ "cluster_file": parquet_path.name,
+ }
+ )
+
+ rows.sort(key=lambda x: x["cosine_sim_score"], reverse=True)
+ return rows[:top_k]
+
+
+def _to_markdown(metrics: dict[str, Any]) -> str:
+ split_metrics = metrics["split"]
+ dedup_metrics = metrics.get("dedup")
+
+ lines = [
+ "# Cosmos-Curate Submission Metrics",
+ "",
+ "## Split Pipeline",
+ "",
+ f"- Input videos: **{split_metrics['num_input_videos']}**",
+ f"- Processed videos: **{split_metrics['num_processed_videos']}**",
+ f"- Clips passed: **{split_metrics['clips_passed']}**",
+ f"- Caption coverage: **{split_metrics['caption_coverage_pct']:.2f}%**",
+ f"- Embedding coverage: **{split_metrics['embedding_coverage_pct']:.2f}%**",
+ f"- Embedding algorithm: **{split_metrics['embedding_algorithm']}**",
+ f"- Pipeline runtime: **{split_metrics['pipeline_run_time_min']:.2f} min**",
+ ]
+
+ if dedup_metrics is not None:
+ lines += [
+ "",
+ "## Semantic Dedup",
+ "",
+ f"- Epsilon: **{dedup_metrics['eps']}**",
+ f"- Total clips considered: **{dedup_metrics['total']}**",
+ f"- Kept clips: **{dedup_metrics['kept']}**",
+ f"- Removed duplicates: **{dedup_metrics['removed']}**",
+ f"- Reduction: **{dedup_metrics['reduction_pct']:.2f}%**",
+ ]
+
+ top_pairs = metrics.get("top_duplicate_pairs", [])
+ if top_pairs:
+ lines += [
+ "",
+ "## Top Duplicate Pairs",
+ "",
+ "| id | max_id | cosine_sim_score | cluster_file |",
+ "|---|---|---:|---|",
+ ]
+ for row in top_pairs:
+ lines.append(
+ f"| {row['id']} | {row['max_id']} | {row['cosine_sim_score']:.6f} | {row['cluster_file']} |"
+ )
+
+ return "\n".join(lines) + "\n"
+
+
+def main() -> None:
+ parser = argparse.ArgumentParser(description="Compute hackathon KPIs from Cosmos-Curate outputs")
+ parser.add_argument("--split-output-path", required=True, help="Path to Cosmos-Curate split output")
+ parser.add_argument("--dedup-output-path", default=None, help="Path to Cosmos-Curate dedup output")
+ parser.add_argument("--dedup-eps", type=float, default=0.01, help="Epsilon used in dedup")
+ parser.add_argument("--top-duplicates", type=int, default=20, help="Top duplicate pairs to include")
+ parser.add_argument(
+ "--output-json",
+ default=None,
+ help="Output JSON path (default: /submission_metrics.json)",
+ )
+ parser.add_argument(
+ "--output-markdown",
+ default=None,
+ help="Output markdown path (default: /submission_metrics.md)",
+ )
+ args = parser.parse_args()
+
+ split_output_path = _normalize_path(args.split_output_path)
+ dedup_output_path = _normalize_path(args.dedup_output_path) if args.dedup_output_path else None
+
+ output_json = (
+ Path(args.output_json).expanduser().resolve()
+ if args.output_json
+ else split_output_path / "submission_metrics.json"
+ )
+ output_markdown = (
+ Path(args.output_markdown).expanduser().resolve()
+ if args.output_markdown
+ else split_output_path / "submission_metrics.md"
+ )
+
+ metrics: dict[str, Any] = {
+ "split": _load_split_metrics(split_output_path),
+ }
+
+ if dedup_output_path is not None:
+ metrics["dedup"] = _load_dedup_metrics(dedup_output_path, args.dedup_eps)
+ metrics["top_duplicate_pairs"] = _top_duplicate_pairs(
+ dedup_output_path,
+ dedup_eps=args.dedup_eps,
+ top_k=args.top_duplicates,
+ )
+
+ output_json.parent.mkdir(parents=True, exist_ok=True)
+ output_json.write_text(json.dumps(metrics, indent=2, sort_keys=True) + "\n", encoding="utf-8")
+
+ output_markdown.parent.mkdir(parents=True, exist_ok=True)
+ output_markdown.write_text(_to_markdown(metrics), encoding="utf-8")
+
+ print(f"Wrote metrics JSON: {output_json}")
+ print(f"Wrote metrics markdown: {output_markdown}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/demo.py b/scripts/demo.py
index 7cde441..dbb821c 100644
--- a/scripts/demo.py
+++ b/scripts/demo.py
@@ -1,7 +1,8 @@
#!/usr/bin/env python3
-"""Run HyperView demo with CIFAR-100 dataset."""
+"""Run HyperView demo with CIFAR-10 dataset."""
import argparse
+import os
import sys
from pathlib import Path
@@ -12,37 +13,69 @@
def main():
parser = argparse.ArgumentParser(description="Run HyperView demo")
parser.add_argument(
- "--samples", type=int, default=500, help="Number of samples to load (default: 500)"
+ "--dataset",
+ type=str,
+ default="cifar10_demo",
+ help="Dataset name to use for persistence (default: cifar10_demo)",
)
parser.add_argument(
- "--port", type=int, default=5151, help="Port to run server on (default: 5151)"
+ "--samples", type=int, default=50000, help="Number of samples to load (default: 50000)"
+ )
+ parser.add_argument(
+ "--port", type=int, default=6263, help="Port to run server on (default: 6263)"
)
parser.add_argument(
"--no-browser", action="store_true", help="Don't open browser automatically"
)
+ parser.add_argument(
+ "--no-persist", action="store_true", help="Don't persist to database (use in-memory)"
+ )
+ parser.add_argument(
+ "--model",
+ type=str,
+ default="openai/clip-vit-base-patch32",
+ help=(
+ "Embedding model_id to use (default: openai/clip-vit-base-patch32). "
+ "This is passed to Dataset.compute_embeddings(model=...)."
+ ),
+ )
+ parser.add_argument(
+ "--datasets-dir",
+ "--database-dir",
+ type=str,
+ default=None,
+ help="Override persistence directory (sets HYPERVIEW_DATASETS_DIR)",
+ )
+ parser.add_argument(
+ "--no-server",
+ action="store_true",
+ help="Don't start the web server (useful for CI / DB checks)",
+ )
args = parser.parse_args()
+ if args.datasets_dir:
+ os.environ["HYPERVIEW_DATASETS_DIR"] = args.datasets_dir
+
import hyperview as hv
- print(f"Loading {args.samples} samples from CIFAR-100...")
- dataset = hv.Dataset("cifar100_demo")
- count = dataset.add_from_huggingface(
- "uoft-cs/cifar100",
+ dataset = hv.Dataset(args.dataset, persist=not args.no_persist)
+
+ dataset.add_from_huggingface(
+ "uoft-cs/cifar10",
split="train",
image_key="img",
- label_key="fine_label",
+ label_key="label",
max_samples=args.samples,
)
- print(f"Loaded {count} samples")
-
- print("Computing embeddings...")
- dataset.compute_embeddings(show_progress=True)
- print("Computing visualization (UMAP + Poincare)...")
- dataset.compute_visualization()
+ space_key = dataset.compute_embeddings(model=args.model, show_progress=True)
+ # Compute a single layout for the UI to display by default.
+ # Switch to geometry="euclidean" for standard 2D UMAP.
+ dataset.compute_visualization(space_key=space_key, geometry="poincare")
- print(f"Starting server at http://127.0.0.1:{args.port}")
+ if args.no_server:
+ return
hv.launch(dataset, port=args.port, open_browser=not args.no_browser)
diff --git a/scripts/demo_hyperbolic_clip.py b/scripts/demo_hyperbolic_clip.py
new file mode 100644
index 0000000..7e781eb
--- /dev/null
+++ b/scripts/demo_hyperbolic_clip.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+"""Demo: CLIP (Euclidean) + HyCoCLIP (Poincaré) on Imagenette."""
+
+import hyperview as hv
+
+DATASET_NAME = "imagenette_val_clip_hycoclip_demo"
+HF_DATASET = "Multimodal-Fatima/Imagenette_validation"
+HF_SPLIT = "validation"
+HF_IMAGE_KEY = "image"
+HF_LABEL_KEY = "label"
+NUM_SAMPLES = 300
+CLIP_MODEL_ID = "openai/clip-vit-base-patch32"
+HYPER_MODELS_MODEL_ID = "hycoclip-vit-s"
+
+
+def main() -> None:
+    """Build an in-memory Imagenette dataset with two embedding spaces and launch the UI."""
+    print("Loading Imagenette validation from Hugging Face...")
+    dataset = hv.Dataset(DATASET_NAME, persist=False)
+    dataset.add_from_huggingface(
+        HF_DATASET,
+        split=HF_SPLIT,
+        image_key=HF_IMAGE_KEY,
+        label_key=HF_LABEL_KEY,
+        max_samples=NUM_SAMPLES,
+        shuffle=True,
+    )
+    print(f"Loaded {len(dataset)} samples")
+
+    # One Euclidean layout from CLIP and one Poincaré layout from HyCoCLIP,
+    # so both geometries are available side by side in the viewer.
+    clip_space = dataset.compute_embeddings(CLIP_MODEL_ID)
+    dataset.compute_visualization(space_key=clip_space, geometry="euclidean")
+    hyper_space = dataset.compute_embeddings(model=HYPER_MODELS_MODEL_ID)
+    dataset.compute_visualization(space_key=hyper_space, geometry="poincare")
+
+    print("Launching at http://127.0.0.1:6263")
+
+    hv.launch(dataset, open_browser=True, port=6263)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/export_frontend.sh b/scripts/export_frontend.sh
index 137545b..dd1cc37 100755
--- a/scripts/export_frontend.sh
+++ b/scripts/export_frontend.sh
@@ -6,14 +6,25 @@ set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
FRONTEND_DIR="$PROJECT_ROOT/frontend"
+HYPER_SCATTER_DIR="$PROJECT_ROOT/hyper-scatter"
STATIC_DIR="$PROJECT_ROOT/src/hyperview/server/static"
-echo "🔄 Building frontend..."
+# Build hyper-scatter library if it's a local checkout
+if [ -d "$HYPER_SCATTER_DIR" ] && [ -f "$HYPER_SCATTER_DIR/package.json" ]; then
+ echo "Building hyper-scatter library..."
+ cd "$HYPER_SCATTER_DIR"
+ if [ ! -d "node_modules" ]; then
+ npm install
+ fi
+ npm run build:lib
+fi
+
+echo "Building frontend..."
cd "$FRONTEND_DIR"
# Install dependencies if needed
if [ ! -d "node_modules" ]; then
- echo "📦 Installing dependencies..."
+ echo "Installing frontend dependencies..."
npm install
fi
@@ -21,11 +32,12 @@ fi
npm run build
# Copy to Python package
-echo "📁 Copying to Python package..."
+echo "Copying build output into Python package..."
rm -rf "$STATIC_DIR"
mkdir -p "$STATIC_DIR"
cp -r out/* "$STATIC_DIR/"
+echo ""
echo "✅ Frontend exported to $STATIC_DIR"
echo ""
echo "To test, run:"
diff --git a/scripts/load_cosmos_curate.py b/scripts/load_cosmos_curate.py
new file mode 100644
index 0000000..e69d7cd
--- /dev/null
+++ b/scripts/load_cosmos_curate.py
@@ -0,0 +1,725 @@
+"""Load Cosmos-Curate outputs into HyperView for hackathon demos.
+
+This script builds a HyperView dataset directly from Cosmos-Curate artifacts:
+- split output metadata (`metas/v0/*.json` or `metas_jsonl/v0/*.jsonl`)
+- split output embeddings parquet (`*_embd_parquet/*.parquet`)
+- optional dedup output (`extraction/semdedup_pruning_tables/*.parquet`)
+
+It then:
+1) adds clip samples with preview thumbnails,
+2) imports precomputed embeddings as a HyperView space,
+3) computes Euclidean + Poincaré 2D layouts,
+4) launches HyperView.
+"""
+
+from __future__ import annotations
+
+import argparse
+import base64
+import csv
+import io
+import json
+import pickle
+import sys
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+
+import numpy as np
+import pyarrow.parquet as pq
+from PIL import Image
+
+sys.path.insert(0, str(Path(__file__).resolve().parent.parent / "src"))
+
+import hyperview as hv
+from hyperview.core.sample import Sample
+from hyperview.storage.schema import make_layout_key
+
+
+@dataclass(frozen=True)
+class ClipRecord:
+    """Immutable per-clip payload assembled from Cosmos-Curate metadata."""
+
+    # Cosmos-Curate span/clip UUID; also used as the HyperView sample id.
+    clip_id: str
+    # Clip video path when available, otherwise the preview image path.
+    filepath: str
+    label: str | None
+    # Per-clip metadata dict surfaced in the HyperView sample view.
+    metadata: dict[str, Any]
+    # Base64-encoded JPEG preview; None when no thumbnail could be built.
+    thumbnail_base64: str | None = None
+
+
+def _normalize_embedding_algorithm(raw_name: str | None) -> str:
+    """Lower-case, hyphenate, and alias-resolve an algorithm name ("" for None/empty)."""
+    name = (raw_name or "").strip().lower().replace("_", "-")
+    aliases = {
+        "internvideo2-mm": "internvideo2",
+        "iv2": "internvideo2",
+        "cosmos-embed1": "cosmos-embed1-224p",
+    }
+    return aliases.get(name, name)
+
+
+def _infer_embedding_algorithm(split_output_path: Path, split_summary: dict[str, Any]) -> str:
+    """Determine the embedding algorithm used for a split output.
+
+    Resolution order: the summary's embedding_algorithm field, then any
+    recognizable entry in models_used, then the presence of known embedding
+    output directories. Raises ValueError when nothing matches.
+    """
+    summary_value = _normalize_embedding_algorithm(split_summary.get("embedding_algorithm"))
+    if summary_value:
+        return summary_value
+
+    for model_name in split_summary.get("models_used", []) or []:
+        model_norm = _normalize_embedding_algorithm(str(model_name))
+        if "cosmos-embed1" in model_norm:
+            # Bare "cosmos-embed1" defaults to the 224p variant.
+            if model_norm == "cosmos-embed1":
+                return "cosmos-embed1-224p"
+            return model_norm
+        if "internvideo2" in model_norm:
+            return "internvideo2"
+
+    if (split_output_path / "ce1_embd_parquet").exists() or (split_output_path / "ce1_embd").exists():
+        return "cosmos-embed1-224p"
+    if (split_output_path / "iv2_embd_parquet").exists() or (split_output_path / "iv2_embd").exists():
+        return "internvideo2"
+
+    raise ValueError(
+        "Could not infer embedding algorithm from split summary or directory structure. "
+        "Expected summary.embedding_algorithm/models_used or one of ce1_embd[_parquet]/iv2_embd[_parquet]."
+    )
+
+
+def _normalize_path(path_str: str) -> Path:
+    """Expand ~ and resolve the path; raise FileNotFoundError if it does not exist."""
+    path = Path(path_str).expanduser().resolve()
+    if not path.exists():
+        raise FileNotFoundError(f"Path does not exist: {path}")
+    return path
+
+
+def _read_json(path: Path) -> dict[str, Any]:
+    """Load a UTF-8 JSON file and return the parsed object."""
+    with path.open("r", encoding="utf-8") as f:
+        return json.load(f)
+
+
+def _embedding_parquet_dir(split_output_path: Path, embedding_algorithm: str) -> Path:
+    """Map an algorithm name to its parquet embedding directory under the split output."""
+    algo = _normalize_embedding_algorithm(embedding_algorithm)
+    if algo == "internvideo2":
+        return split_output_path / "iv2_embd_parquet"
+    if algo.startswith("cosmos-embed1"):
+        return split_output_path / "ce1_embd_parquet"
+    return split_output_path / f"{algo}_embd_parquet"
+
+
+def _embedding_pickle_dir(split_output_path: Path, embedding_algorithm: str) -> Path:
+    """Map an algorithm name to its per-clip pickle embedding directory (parquet fallback)."""
+    algo = _normalize_embedding_algorithm(embedding_algorithm)
+    if algo == "internvideo2":
+        return split_output_path / "iv2_embd"
+    if algo.startswith("cosmos-embed1"):
+        return split_output_path / "ce1_embd"
+    return split_output_path / f"{algo}_embd"
+
+
+def _embedding_model_id(embedding_algorithm: str) -> str:
+    """Translate an algorithm name to a model id; unknown names pass through unchanged."""
+    algo = _normalize_embedding_algorithm(embedding_algorithm)
+    mapping = {
+        "cosmos-embed1-224p": "nvidia/Cosmos-Embed1-224p",
+        "cosmos-embed1-336p": "nvidia/Cosmos-Embed1-336p",
+        "cosmos-embed1-448p": "nvidia/Cosmos-Embed1-448p",
+        "internvideo2": "OpenGVLab/InternVideo2-Stage2_1B-224p-f4",
+    }
+    return mapping.get(algo, embedding_algorithm)
+
+
+def _expected_embedding_dim(embedding_algorithm: str) -> int | None:
+    """Return the expected vector dimension for known algorithms, or None if unknown."""
+    algo = _normalize_embedding_algorithm(embedding_algorithm)
+    expected = {
+        "cosmos-embed1-224p": 256,
+        "cosmos-embed1-336p": 768,
+        "cosmos-embed1-448p": 768,
+    }
+    return expected.get(algo)
+
+
+def _dedup_summary_path(dedup_output_path: Path, dedup_eps: float) -> Path:
+    """Path of the dedup summary CSV for a given epsilon.
+
+    The eps is formatted compactly (%.6g with trailing zeros/dot stripped),
+    e.g. 0.010 -> "0.01". NOTE(review): assumed to match how Cosmos-Curate
+    names its summary files — confirm against the producer.
+    """
+    tag = f"{dedup_eps:.6g}".rstrip("0").rstrip(".")
+    return dedup_output_path / "extraction" / f"dedup_summary_{tag}.csv"
+
+
+def _load_dedup_maps(dedup_output_path: Path, dedup_eps: float) -> tuple[dict[str, bool], dict[str, float]]:
+    """Read semdedup pruning tables into (keep_map, score_map) keyed by clip id.
+
+    A clip is "kept" when its cosine similarity score is <= 1 - dedup_eps.
+    Both maps are empty when the pruning directory does not exist.
+    """
+    keep_map: dict[str, bool] = {}
+    score_map: dict[str, float] = {}
+    pruning_dir = dedup_output_path / "extraction" / "semdedup_pruning_tables"
+    if not pruning_dir.exists():
+        return keep_map, score_map
+
+    threshold = 1.0 - dedup_eps
+    for parquet_path in sorted(pruning_dir.glob("cluster_*.parquet")):
+        table = pq.read_table(parquet_path, columns=["id", "cosine_sim_score"])
+        ids = table.column("id").to_pylist()
+        scores = table.column("cosine_sim_score").to_pylist()
+        for clip_id, score in zip(ids, scores):
+            if clip_id is None or score is None:
+                continue
+            clip_id_str = str(clip_id)
+            score_f = float(score)
+            keep_map[clip_id_str] = score_f <= threshold
+            score_map[clip_id_str] = score_f
+    return keep_map, score_map
+
+
+def _select_caption(window: dict[str, Any], caption_field: str | None) -> str | None:
+    """Return the preferred caption field from a window, else the first *_caption value."""
+    if caption_field and caption_field in window:
+        value = window.get(caption_field)
+        if isinstance(value, str) and value.strip():
+            return value.strip()
+
+    for key, value in window.items():
+        if key.endswith("_caption") and isinstance(value, str) and value.strip():
+            return value.strip()
+    return None
+
+
+def _select_caption_from_row(row: dict[str, Any], caption_field: str | None) -> str | None:
+    """Best-effort caption lookup: first window, then the captions dict, then flat row keys."""
+    windows = row.get("windows") or []
+    first_window = windows[0] if isinstance(windows, list) and windows else {}
+    if isinstance(first_window, dict):
+        caption = _select_caption(first_window, caption_field)
+        if caption:
+            return caption
+
+    captions_obj = row.get("captions")
+    if isinstance(captions_obj, dict):
+        # Check the caller's preferred key first, then common defaults, then anything.
+        preferred_keys = [caption_field] if caption_field else []
+        preferred_keys += ["cosmos_r2", "cosmos_r2_caption", "caption", "default"]
+
+        for key in preferred_keys:
+            if not key:
+                continue
+            value = captions_obj.get(key)
+            if isinstance(value, str) and value.strip():
+                return value.strip()
+
+        for value in captions_obj.values():
+            if isinstance(value, str) and value.strip():
+                return value.strip()
+
+    for key in ("caption", "first_caption", "cosmos_r2_caption"):
+        value = row.get(key)
+        if isinstance(value, str) and value.strip():
+            return value.strip()
+
+    return None
+
+
+def _extract_reasoning_and_answer(caption_text: str | None) -> tuple[str | None, str | None]:
+    """Split a raw caption into (reasoning, answer); answer falls back to the full text.
+
+    NOTE(review): the delimiter strings below look garbled — str.find("")
+    always returns 0 and find(" ") matches the first space, so the slicing
+    cannot extract what was intended. The original delimiters (likely
+    XML-style reasoning/answer tags) appear to have been lost; confirm
+    against the captioning pipeline's actual output format.
+    """
+    if not caption_text:
+        return None, None
+
+    reasoning: str | None = None
+    answer: str | None = None
+
+    think_start = caption_text.find("")
+    think_end = caption_text.find(" ")
+    if think_start != -1 and think_end != -1 and think_end > think_start:
+        reasoning = caption_text[think_start + len("") : think_end].strip() or None
+
+    answer_start = caption_text.find("")
+    answer_end = caption_text.find(" ")
+    if answer_start != -1 and answer_end != -1 and answer_end > answer_start:
+        answer = caption_text[answer_start + len("") : answer_end].strip() or None
+
+    if answer is None:
+        answer = caption_text.strip() or None
+
+    return reasoning, answer
+
+
+def _default_label(
+    *,
+    label_mode: str,
+    source_video: str,
+    keep_flag: bool | None,
+) -> str:
+    """Choose a sample label: dedup status (kept/removed/unknown) or the source-video stem."""
+    if label_mode == "dedup-status":
+        if keep_flag is True:
+            return "kept"
+        if keep_flag is False:
+            return "removed"
+        return "unknown"
+    return Path(source_video).stem or "unknown"
+
+
+def _iter_clip_metadata_rows(split_output_path: Path) -> list[dict[str, Any]]:
+    """Load clip metadata rows from metas/v0/*.json, else metas_jsonl/v0/*.jsonl.
+
+    Raises FileNotFoundError when neither directory exists.
+    """
+    metas_dir = split_output_path / "metas" / "v0"
+    metas_jsonl_dir = split_output_path / "metas_jsonl" / "v0"
+
+    rows: list[dict[str, Any]] = []
+    if metas_dir.exists():
+        for json_path in sorted(metas_dir.glob("*.json")):
+            rows.append(_read_json(json_path))
+        return rows
+
+    if metas_jsonl_dir.exists():
+        for jsonl_path in sorted(metas_jsonl_dir.glob("*.jsonl")):
+            with jsonl_path.open("r", encoding="utf-8") as f:
+                for line in f:
+                    line = line.strip()
+                    if line:
+                        rows.append(json.loads(line))
+        return rows
+
+    raise FileNotFoundError(
+        "No clip metadata found. Expected one of: "
+        f"{metas_dir} or {metas_jsonl_dir}."
+    )
+
+
+def _placeholder_image(split_output_path: Path) -> Path:
+    """Create (once) and return a 96x96 dark-gray JPEG used when a preview is missing."""
+    placeholder_path = split_output_path / "_hyperview_placeholder.jpg"
+    if not placeholder_path.exists():
+        placeholder_path.parent.mkdir(parents=True, exist_ok=True)
+        Image.new("RGB", (96, 96), color=(35, 35, 35)).save(placeholder_path, "JPEG", quality=92)
+    return placeholder_path
+
+
+def _thumbnail_base64_from_image(path: Path, fallback: Path) -> str | None:
+    """Base64-encode the image as JPEG, trying path then fallback; None if both fail."""
+    for candidate in (path, fallback):
+        try:
+            with Image.open(candidate) as img:
+                # JPEG has no alpha/palette; convert before saving.
+                if img.mode in ("RGBA", "P"):
+                    img = img.convert("RGB")
+                buffer = io.BytesIO()
+                img.save(buffer, format="JPEG", quality=85)
+                return base64.b64encode(buffer.getvalue()).decode("utf-8")
+        except Exception:
+            continue
+    return None
+
+
+def _build_clip_records(
+    *,
+    split_output_path: Path,
+    metadata_rows: list[dict[str, Any]],
+    dedup_keep_map: dict[str, bool],
+    dedup_score_map: dict[str, float],
+    caption_field: str | None,
+    label_mode: str,
+    max_samples: int,
+    allow_placeholder: bool,
+) -> list[ClipRecord]:
+    """Turn metadata rows into ClipRecords, resolving previews, captions, and dedup flags.
+
+    Rows without a usable clip id are skipped; rows without a preview are
+    skipped unless allow_placeholder is set. Stops once max_samples (when > 0)
+    records have been collected.
+    """
+    records: list[ClipRecord] = []
+    placeholder_path = _placeholder_image(split_output_path)
+    previews_dir = split_output_path / "previews"
+
+    for row in metadata_rows:
+        clip_id = str(row.get("span_uuid") or row.get("clip_uuid") or row.get("id") or "").strip()
+        if not clip_id:
+            continue
+
+        windows = row.get("windows") or []
+        first_window = windows[0] if windows else {}
+        start_frame = first_window.get("start_frame") if isinstance(first_window, dict) else None
+        end_frame = first_window.get("end_frame") if isinstance(first_window, dict) else None
+        if start_frame is None or end_frame is None:
+            # Fall back to the whole clip's frame range when the window lacks one.
+            num_frames = row.get("num_frames")
+            if isinstance(num_frames, int) and num_frames > 0:
+                start_frame = 0
+                end_frame = num_frames
+
+        # Preview lookup order: nested <clip_id>/<start>_<end>.webp, flat
+        # <clip_id>_<start>_<end>.webp, then any webp matching the clip id.
+        preview_path: Path | None = None
+        if start_frame is not None and end_frame is not None:
+            nested_candidate = previews_dir / clip_id / f"{start_frame}_{end_frame}.webp"
+            flat_candidate = previews_dir / f"{clip_id}_{start_frame}_{end_frame}.webp"
+            if nested_candidate.exists():
+                preview_path = nested_candidate
+            elif flat_candidate.exists():
+                preview_path = flat_candidate
+
+        if preview_path is None:
+            flat_matches = sorted(previews_dir.glob(f"{clip_id}_*.webp"))
+            if flat_matches:
+                preview_path = flat_matches[0]
+
+        if preview_path is None:
+            nested_matches = sorted((previews_dir / clip_id).glob("*.webp")) if (previews_dir / clip_id).exists() else []
+            if nested_matches:
+                preview_path = nested_matches[0]
+
+        if preview_path is None and allow_placeholder:
+            preview_path = placeholder_path
+        if preview_path is None:
+            continue
+
+        source_video = str(row.get("source_video", ""))
+        clip_location = row.get("clip_location")
+        video_path = str(clip_location).strip() if clip_location else ""
+        if not video_path:
+            # Default location used by the split pipeline when none is recorded.
+            default_clip = split_output_path / "clips" / f"{clip_id}.mp4"
+            if default_clip.exists():
+                video_path = str(default_clip)
+
+        keep_flag = dedup_keep_map.get(clip_id)
+        cosine_sim_score = dedup_score_map.get(clip_id)
+        caption = _select_caption_from_row(row, caption_field)
+        caption_reasoning, caption_answer = _extract_reasoning_and_answer(caption)
+
+        span = row.get("duration_span")
+        if span is None:
+            span = row.get("span")
+
+        metadata = {
+            "clip_id": clip_id,
+            "source_video": source_video,
+            "clip_location": clip_location,
+            "video_path": video_path or None,
+            "duration_span": span,
+            "span": row.get("span"),
+            "window_count": len(windows),
+            "first_caption": caption,
+            "caption_raw": caption,
+            "caption_answer": caption_answer,
+            "caption_reasoning": caption_reasoning,
+            "has_caption": bool(row.get("has_caption", False) or caption),
+            "aesthetic_score": row.get("aesthetic_score"),
+            "motion_score": row.get("motion_score"),
+            "dimensions": row.get("dimensions"),
+            "framerate": row.get("framerate"),
+            "num_frames": row.get("num_frames"),
+            "dedup_keep": keep_flag,
+            "dedup_status": "kept" if keep_flag is True else ("removed" if keep_flag is False else "unknown"),
+            "cosine_sim_score": cosine_sim_score,
+            "dedup_cosine_similarity": cosine_sim_score,
+            "preview_path": str(preview_path),
+            "preview_is_placeholder": preview_path == placeholder_path,
+        }
+
+        label = _default_label(label_mode=label_mode, source_video=source_video, keep_flag=keep_flag)
+        # Prefer the playable clip; fall back to the preview image path.
+        sample_filepath = video_path or str(preview_path)
+
+        records.append(
+            ClipRecord(
+                clip_id=clip_id,
+                filepath=sample_filepath,
+                label=label,
+                metadata=metadata,
+                thumbnail_base64=_thumbnail_base64_from_image(preview_path, placeholder_path),
+            )
+        )
+
+        if max_samples > 0 and len(records) >= max_samples:
+            break
+
+    return records
+
+
+def _extract_dimensions(metadata: dict[str, Any]) -> tuple[int | None, int | None]:
+    """Parse metadata["dimensions"] into positive (width, height); (None, None) otherwise."""
+    dims = metadata.get("dimensions")
+    if not isinstance(dims, (list, tuple)) or len(dims) < 2:
+        return None, None
+
+    try:
+        width = int(dims[0])
+        height = int(dims[1])
+    except (TypeError, ValueError):
+        return None, None
+
+    if width <= 0 or height <= 0:
+        return None, None
+
+    return width, height
+
+
+def _load_embeddings(
+    split_output_path: Path,
+    embedding_algorithm: str,
+    *,
+    clip_ids: set[str],
+) -> tuple[list[str], np.ndarray]:
+    """Load embedding vectors for the requested clip ids.
+
+    Parquet files are preferred; per-clip pickles are only consulted when the
+    parquet pass yields nothing. Vectors must be finite 1-D arrays; vectors
+    whose dimension differs from the first accepted one are dropped. Returns
+    (ids, float32 matrix), or ([], empty array) when nothing usable is found.
+    """
+    algo = _normalize_embedding_algorithm(embedding_algorithm)
+    parquet_dir = _embedding_parquet_dir(split_output_path, algo)
+    pickle_dir = _embedding_pickle_dir(split_output_path, algo)
+
+    vectors_by_id: dict[str, np.ndarray] = {}
+
+    def _store_vector(clip_id: str, payload: Any) -> None:
+        # First occurrence wins; ids outside the requested set are ignored.
+        if clip_id in vectors_by_id:
+            return
+        if clip_id not in clip_ids:
+            return
+
+        # Accept ndarray, list/tuple, or a dict carrying the vector under a known key.
+        vec: np.ndarray | None = None
+        if isinstance(payload, np.ndarray):
+            vec = payload.astype(np.float32)
+        elif isinstance(payload, (list, tuple)):
+            vec = np.asarray(payload, dtype=np.float32)
+        elif isinstance(payload, dict):
+            for key in ("embedding", "vector", "emb", "embd"):
+                if key in payload:
+                    vec = np.asarray(payload[key], dtype=np.float32)
+                    break
+
+        if vec is None or vec.ndim != 1 or vec.size == 0 or np.any(~np.isfinite(vec)):
+            return
+        vectors_by_id[clip_id] = vec
+
+    if parquet_dir.exists():
+        for parquet_path in sorted(parquet_dir.glob("*.parquet")):
+            table = pq.read_table(parquet_path)
+            # Column names vary between pipeline versions; match case-insensitively.
+            cols = {name.lower(): name for name in table.column_names}
+
+            id_col_name = next((cols[c] for c in ("id", "clip_uuid", "span_uuid", "uuid") if c in cols), None)
+            vec_col_name = next(
+                (cols[c] for c in ("embedding", "vector", "embeddings", "embd") if c in cols),
+                None,
+            )
+
+            if id_col_name is None or vec_col_name is None:
+                continue
+
+            id_values = table.column(id_col_name).to_pylist()
+            emb_values = table.column(vec_col_name).to_pylist()
+
+            for clip_id_raw, emb in zip(id_values, emb_values):
+                if clip_id_raw is None or emb is None:
+                    continue
+                _store_vector(str(clip_id_raw), emb)
+
+    if (not vectors_by_id) and pickle_dir.exists():
+        for pickle_path in sorted(pickle_dir.glob("*.pickle")):
+            clip_id = pickle_path.stem
+            if clip_id not in clip_ids:
+                continue
+            try:
+                with pickle_path.open("rb") as f:
+                    payload = pickle.load(f)
+            except Exception:
+                continue
+            _store_vector(clip_id, payload)
+
+    if not vectors_by_id:
+        return [], np.empty((0, 0), dtype=np.float32)
+
+    ids = list(vectors_by_id.keys())
+    vectors = list(vectors_by_id.values())
+
+    if not vectors:
+        return [], np.empty((0, 0), dtype=np.float32)
+
+    # Keep only vectors matching the dimension of the first accepted vector.
+    dim = vectors[0].shape[0]
+    filtered_ids: list[str] = []
+    filtered_vectors: list[np.ndarray] = []
+    for clip_id, vec in zip(ids, vectors):
+        if vec.shape[0] == dim:
+            filtered_ids.append(clip_id)
+            filtered_vectors.append(vec)
+
+    if not filtered_vectors:
+        return [], np.empty((0, 0), dtype=np.float32)
+
+    return filtered_ids, np.vstack(filtered_vectors).astype(np.float32)
+
+
+def _dedup_summary_metrics(dedup_output_path: Path, dedup_eps: float) -> dict[str, Any] | None:
+    """Parse the first row of the dedup summary CSV into a metrics dict, or None if absent/empty."""
+    summary_path = _dedup_summary_path(dedup_output_path, dedup_eps)
+    if not summary_path.exists():
+        return None
+
+    with summary_path.open("r", encoding="utf-8") as f:
+        reader = csv.DictReader(f)
+        rows = list(reader)
+    if not rows:
+        return None
+
+    row = rows[0]
+    total = int(row.get("total", 0))
+    kept = int(row.get("kept", 0))
+    removed = int(row.get("removed", 0))
+    reduction_pct = (removed / total * 100.0) if total > 0 else 0.0
+
+    return {
+        "eps": float(row.get("eps", dedup_eps)),
+        "kept": kept,
+        "removed": removed,
+        "total": total,
+        "reduction_pct": reduction_pct,
+    }
+
+
+def _manual_small_layout_coords(num_points: int, *, poincare: bool) -> np.ndarray:
+    """Hand-placed 2-D coordinates for tiny datasets: origin, horizontal pair, or a ring."""
+    if num_points <= 0:
+        return np.empty((0, 2), dtype=np.float32)
+    if num_points == 1:
+        coords = np.array([[0.0, 0.0]], dtype=np.float32)
+    elif num_points == 2:
+        coords = np.array([[-0.35, 0.0], [0.35, 0.0]], dtype=np.float32)
+    else:
+        angles = np.linspace(0, 2 * np.pi, num_points, endpoint=False, dtype=np.float32)
+        radius = 0.6 if poincare else 1.0
+        coords = np.stack([radius * np.cos(angles), radius * np.sin(angles)], axis=1).astype(np.float32)
+
+    if poincare:
+        # Rescale any point at norm >= 0.98 so everything stays inside the unit disk.
+        norms = np.linalg.norm(coords, axis=1, keepdims=True)
+        mask = norms >= 0.98
+        if np.any(mask):
+            scale = (0.98 / np.maximum(norms[mask], 1e-9)).reshape(-1, 1)
+            coords[mask[:, 0]] *= scale
+    return coords
+
+
+def _create_small_layouts(dataset: hv.Dataset, *, space_key: str, ids: list[str]) -> None:
+    """Register manual Euclidean and Poincaré layouts when there are too few samples for UMAP."""
+    euclidean_key = make_layout_key(space_key, method="manual", geometry="euclidean")
+    poincare_key = make_layout_key(space_key, method="manual", geometry="poincare")
+
+    # NOTE(review): reaches into the private _storage API directly; acceptable
+    # for a demo script, but worth a public Dataset method eventually.
+    dataset._storage.ensure_layout(
+        layout_key=euclidean_key,
+        space_key=space_key,
+        method="manual",
+        geometry="euclidean",
+        params={"reason": "too_few_samples_for_umap"},
+    )
+    dataset._storage.ensure_layout(
+        layout_key=poincare_key,
+        space_key=space_key,
+        method="manual",
+        geometry="poincare",
+        params={"reason": "too_few_samples_for_umap"},
+    )
+
+    dataset._storage.add_layout_coords(euclidean_key, ids, _manual_small_layout_coords(len(ids), poincare=False))
+    dataset._storage.add_layout_coords(poincare_key, ids, _manual_small_layout_coords(len(ids), poincare=True))
+
+
+def main() -> None:
+    """CLI entry point: build a HyperView dataset from Cosmos-Curate outputs and launch the UI."""
+    parser = argparse.ArgumentParser(description="Load Cosmos-Curate outputs into HyperView")
+    parser.add_argument("--split-output-path", required=True, help="Path to Cosmos-Curate split output directory")
+    parser.add_argument("--dedup-output-path", default=None, help="Path to Cosmos-Curate dedup output directory")
+    parser.add_argument("--dedup-eps", type=float, default=0.01, help="Epsilon used for semantic dedup")
+    parser.add_argument("--caption-field", default="cosmos_r2_caption", help="Preferred caption field in windows")
+    parser.add_argument(
+        "--label-mode",
+        choices=["source-video", "dedup-status"],
+        default="dedup-status",
+        help="How labels are assigned in HyperView",
+    )
+    parser.add_argument("--dataset-name", default="cosmos_curate_submission", help="HyperView dataset name")
+    parser.add_argument(
+        "--persist",
+        action=argparse.BooleanOptionalAction,
+        default=True,
+        help="Persist dataset in LanceDB (--persist) or keep in-memory (--no-persist)",
+    )
+    parser.add_argument("--max-samples", type=int, default=0, help="Max clips to load (0 = all)")
+    parser.add_argument(
+        "--allow-placeholder",
+        action=argparse.BooleanOptionalAction,
+        default=True,
+        help="Use a generated placeholder image when preview webp is missing",
+    )
+    parser.add_argument("--port", type=int, default=6263, help="Port for HyperView")
+    parser.add_argument("--no-browser", action="store_true", help="Do not auto-open browser")
+    parser.add_argument("--reuse-server", action="store_true", help="Reuse compatible running HyperView server")
+    args = parser.parse_args()
+
+    split_output_path = _normalize_path(args.split_output_path)
+    dedup_output_path = _normalize_path(args.dedup_output_path) if args.dedup_output_path else None
+
+    summary_path = split_output_path / "summary.json"
+    if not summary_path.exists():
+        raise FileNotFoundError(f"Missing split summary file: {summary_path}")
+    split_summary = _read_json(summary_path)
+    embedding_algorithm = _infer_embedding_algorithm(split_output_path, split_summary)
+
+    print(f"Loading split output from: {split_output_path}")
+    print(f"Embedding algorithm: {embedding_algorithm}")
+
+    # Dedup info is optional; maps stay empty when no dedup output is given.
+    dedup_keep_map: dict[str, bool] = {}
+    dedup_score_map: dict[str, float] = {}
+    dedup_metrics: dict[str, Any] | None = None
+    if dedup_output_path is not None:
+        dedup_keep_map, dedup_score_map = _load_dedup_maps(dedup_output_path, args.dedup_eps)
+        dedup_metrics = _dedup_summary_metrics(dedup_output_path, args.dedup_eps)
+        print(f"Loaded dedup flags for {len(dedup_keep_map)} clips")
+
+    metadata_rows = _iter_clip_metadata_rows(split_output_path)
+    records = _build_clip_records(
+        split_output_path=split_output_path,
+        metadata_rows=metadata_rows,
+        dedup_keep_map=dedup_keep_map,
+        dedup_score_map=dedup_score_map,
+        caption_field=args.caption_field,
+        label_mode=args.label_mode,
+        max_samples=args.max_samples,
+        allow_placeholder=args.allow_placeholder,
+    )
+    if not records:
+        raise RuntimeError("No clip samples loaded from split output")
+
+    dataset = hv.Dataset(args.dataset_name, persist=args.persist)
+    samples: list[Sample] = []
+    for record in records:
+        width, height = _extract_dimensions(record.metadata)
+        samples.append(
+            Sample(
+                id=record.clip_id,
+                filepath=record.filepath,
+                label=record.label,
+                metadata=record.metadata,
+                thumbnail_base64=record.thumbnail_base64,
+                width=width,
+                height=height,
+            )
+        )
+    # NOTE(review): bulk-insert via the private _storage API; fine for a demo script.
+    dataset._storage.add_samples_batch(samples)
+    print(f"Added/updated {len(samples)} samples in dataset '{args.dataset_name}'")
+
+    ids, vectors = _load_embeddings(
+        split_output_path,
+        embedding_algorithm,
+        clip_ids={record.clip_id for record in records},
+    )
+    if len(ids) == 0:
+        raise RuntimeError(
+            "No embeddings found for loaded clips. Checked both parquet and pickle embedding directories."
+        )
+
+    # Warn (but proceed) when the loaded dimension differs from the known model output.
+    expected_dim = _expected_embedding_dim(embedding_algorithm)
+    if expected_dim is not None and int(vectors.shape[1]) != expected_dim:
+        print(
+            "WARNING: Loaded embedding dimension does not match expected model output: "
+            f"algorithm={embedding_algorithm} expected_dim={expected_dim} loaded_dim={vectors.shape[1]}"
+        )
+
+    model_id = _embedding_model_id(embedding_algorithm)
+    space_key = f"cosmos_curate__{embedding_algorithm.replace('/', '_')}"
+    dataset._storage.ensure_space(
+        model_id=model_id,
+        dim=int(vectors.shape[1]),
+        config={
+            "provider": "cosmos-curate",
+            "geometry": "euclidean",
+            "algorithm": embedding_algorithm,
+            "source": "cosmos-curate",
+        },
+        space_key=space_key,
+    )
+    dataset._storage.add_embeddings(space_key=space_key, ids=ids, vectors=vectors)
+    print(f"Imported {len(ids)} embeddings (dim={vectors.shape[1]}) into space '{space_key}'")
+
+    if len(ids) >= 3:
+        print("Computing Euclidean layout...")
+        dataset.compute_visualization(space_key=space_key, geometry="euclidean")
+        print("Computing Poincaré layout...")
+        dataset.compute_visualization(space_key=space_key, geometry="poincare")
+    else:
+        print("Too few samples for UMAP; creating manual Euclidean/Poincaré layouts...")
+        _create_small_layouts(dataset, space_key=space_key, ids=ids)
+
+    if dedup_metrics is not None:
+        print(
+            "Dedup summary: "
+            f"kept={dedup_metrics['kept']} removed={dedup_metrics['removed']} "
+            f"total={dedup_metrics['total']} reduction={dedup_metrics['reduction_pct']:.2f}%"
+        )
+
+    print(f"Launching HyperView at http://127.0.0.1:{args.port}")
+    hv.launch(
+        dataset,
+        port=args.port,
+        open_browser=not args.no_browser,
+        reuse_server=args.reuse_server,
+    )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/load_nuscenes.py b/scripts/load_nuscenes.py
new file mode 100644
index 0000000..ea8e73a
--- /dev/null
+++ b/scripts/load_nuscenes.py
@@ -0,0 +1,330 @@
+"""Load nuScenes mini dataset into HyperView for embedding visualization.
+
+This script:
+1. Loads nuScenes mini (10 scenes, ~404 samples)
+2. Extracts front-camera images and their object annotations (hierarchical labels)
+3. Crops annotated objects from camera images
+4. Loads them into HyperView as samples with hierarchical category labels
+5. Computes CLIP embeddings + both Euclidean and Poincaré projections
+6. Launches HyperView to visualize
+
+Usage:
+ uv run python scripts/load_nuscenes.py [--dataroot ~/nuscenes] [--max-crops 2000]
+"""
+
+import argparse
+import hashlib
+import os
+import sys
+from pathlib import Path
+
+import numpy as np
+from PIL import Image
+
+# Add src to path for local dev
+sys.path.insert(0, str(Path(__file__).resolve().parent.parent / "src"))
+
+import hyperview as hv
+from hyperview.core.sample import Sample
+
+
+def get_2d_box(nusc, ann, sample_data):
+    """Project a 3D annotation to 2D bounding box on the camera image.
+
+    Returns (xmin, ymin, xmax, ymax) or None if not visible.
+    """
+    from nuscenes.utils.data_classes import Box
+    from nuscenes.utils.geometry_utils import view_points
+    from pyquaternion import Quaternion
+
+    # Get the annotation box in global frame
+    box = Box(
+        ann["translation"],
+        ann["size"],
+        Quaternion(ann["rotation"]),
+    )
+
+    # Move box to ego vehicle frame
+    ego_pose = nusc.get("ego_pose", sample_data["ego_pose_token"])
+    box.translate(-np.array(ego_pose["translation"]))
+    box.rotate(Quaternion(ego_pose["rotation"]).inverse)
+
+    # Move box to sensor frame
+    cal_sensor = nusc.get("calibrated_sensor", sample_data["calibrated_sensor_token"])
+    box.translate(-np.array(cal_sensor["translation"]))
+    box.rotate(Quaternion(cal_sensor["rotation"]).inverse)
+
+    # Check if box is in front of camera
+    if box.center[2] <= 0:
+        return None
+
+    # Project corners to 2D
+    camera_intrinsic = np.array(cal_sensor["camera_intrinsic"])
+    corners_3d = box.corners()  # (3, 8)
+    corners_2d = view_points(corners_3d, camera_intrinsic, normalize=True)[:2]  # (2, 8)
+
+    # Get bounding rectangle, clamped to the image.
+    # NOTE(review): 1600x900 is hard-coded — assumed nuScenes camera resolution;
+    # confirm before reusing with sensors of a different size.
+    xmin = max(0, int(np.min(corners_2d[0])))
+    ymin = max(0, int(np.min(corners_2d[1])))
+    xmax = min(1600, int(np.max(corners_2d[0])))
+    ymax = min(900, int(np.max(corners_2d[1])))
+
+    # Filter out tiny or off-screen boxes
+    w = xmax - xmin
+    h = ymax - ymin
+    if w < 20 or h < 20 or xmin >= 1600 or ymin >= 900:
+        return None
+
+    return (xmin, ymin, xmax, ymax)
+
+
+def load_nuscenes_crops(dataroot: str, max_crops: int = 2000, camera: str = "CAM_FRONT"):
+    """Extract object crops from nuScenes mini with hierarchical labels.
+
+    Returns (crops, nusc): crops is a list of
+    (crop_path, category_name, metadata, crop_id) tuples and nusc is the
+    loaded NuScenes handle.
+    """
+    from nuscenes.nuscenes import NuScenes
+
+    print(f"Loading nuScenes mini from {dataroot}...")
+    nusc = NuScenes(version="v1.0-mini", dataroot=dataroot, verbose=True)
+
+    # Create output directory for crops
+    crops_dir = Path(dataroot) / "crops"
+    crops_dir.mkdir(exist_ok=True)
+
+    crops = []
+    total_anns = 0
+
+    for sample in nusc.sample:
+        # Get camera data
+        cam_token = sample["data"].get(camera)
+        if cam_token is None:
+            continue
+        cam_data = nusc.get("sample_data", cam_token)
+        img_path = os.path.join(dataroot, cam_data["filename"])
+
+        if not os.path.exists(img_path):
+            continue
+
+        img = Image.open(img_path)
+
+        for ann_token in sample["anns"]:
+            ann = nusc.get("sample_annotation", ann_token)
+            category = ann["category_name"]  # e.g. "vehicle.car", "human.pedestrian.adult"
+
+            # Project to 2D
+            bbox = get_2d_box(nusc, ann, cam_data)
+            if bbox is None:
+                continue
+
+            xmin, ymin, xmax, ymax = bbox
+            crop = img.crop((xmin, ymin, xmax, ymax))
+
+            # Skip very small crops
+            if crop.size[0] < 24 or crop.size[1] < 24:
+                continue
+
+            # Save crop; deterministic id so reruns reuse previously saved files.
+            crop_id = hashlib.md5(f"{ann_token}_{camera}".encode()).hexdigest()[:12]
+            crop_path = crops_dir / f"{crop_id}.jpg"
+            if not crop_path.exists():
+                crop.convert("RGB").save(crop_path, "JPEG", quality=90)
+
+            # Parse hierarchy (e.g. "human.pedestrian.adult" -> "human", "human.pedestrian")
+            parts = category.split(".")
+            top_level = parts[0] if len(parts) >= 1 else category
+            mid_level = ".".join(parts[:2]) if len(parts) >= 2 else category
+
+            visibility_token = ann.get("visibility_token", "")
+            vis = nusc.get("visibility", visibility_token)["description"] if visibility_token else ""
+
+            metadata = {
+                "category": category,
+                "top_level": top_level,
+                "mid_level": mid_level,
+                "visibility": vis,
+                "num_lidar_pts": ann.get("num_lidar_pts", 0),
+                "scene_token": sample["scene_token"],
+                "sample_token": sample["token"],
+                "camera": camera,
+                "bbox": list(bbox),
+            }
+
+            crops.append((str(crop_path), category, metadata, crop_id))
+            total_anns += 1
+
+            if len(crops) >= max_crops:
+                break
+
+        if len(crops) >= max_crops:
+            break
+
+    print(f"Extracted {len(crops)} object crops from {total_anns} annotations")
+
+    # Print category distribution
+    from collections import Counter
+    cat_counts = Counter(c[1] for c in crops)
+    print("\nCategory distribution:")
+    for cat, count in cat_counts.most_common():
+        print(f"  {cat}: {count}")
+
+    return crops, nusc
+
+
+def load_nuscenes_scenes(dataroot: str, camera: str = "CAM_FRONT"):
+    """Load full camera images (scene-level) with scene metadata.
+
+    Good for a broader overview. Each sample is one camera keyframe.
+    Returns (images, nusc): images is a list of
+    (img_path, label, metadata, sample_id) tuples and nusc is the loaded
+    NuScenes handle.
+    """
+    from nuscenes.nuscenes import NuScenes
+
+    print(f"Loading nuScenes mini scenes from {dataroot}...")
+    nusc = NuScenes(version="v1.0-mini", dataroot=dataroot, verbose=True)
+
+    images = []
+    for sample in nusc.sample:
+        cam_token = sample["data"].get(camera)
+        if cam_token is None:
+            continue
+        cam_data = nusc.get("sample_data", cam_token)
+        img_path = os.path.join(dataroot, cam_data["filename"])
+
+        if not os.path.exists(img_path):
+            continue
+
+        # Get scene info
+        scene = nusc.get("scene", sample["scene_token"])
+        log = nusc.get("log", scene["log_token"])
+
+        # Count annotations by top-level category
+        ann_summary = {}
+        for ann_token in sample["anns"]:
+            ann = nusc.get("sample_annotation", ann_token)
+            top = ann["category_name"].split(".")[0]
+            ann_summary[top] = ann_summary.get(top, 0) + 1
+
+        # Deterministic id derived from the sample token + camera name.
+        sample_id = hashlib.md5(f"{sample['token']}_{camera}".encode()).hexdigest()[:12]
+
+        metadata = {
+            "scene_name": scene["name"],
+            "scene_description": scene["description"],
+            "location": log["location"],
+            "num_annotations": len(sample["anns"]),
+            "annotation_summary": ann_summary,
+            "timestamp": sample["timestamp"],
+        }
+
+        # Label by location + description for scene-level clustering
+        label = log["location"]
+
+        images.append((img_path, label, metadata, sample_id))
+
+    print(f"Loaded {len(images)} scene images")
+    return images, nusc
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Load nuScenes into HyperView")
+ parser.add_argument("--dataroot", default=os.path.expanduser("~/nuscenes"),
+ help="Path to nuScenes data")
+ parser.add_argument("--max-crops", type=int, default=2000,
+ help="Max number of object crops to extract")
+ parser.add_argument("--mode", choices=["crops", "scenes", "both"], default="crops",
+ help="What to load: 'crops' (object-level), 'scenes' (image-level), or 'both'")
+ parser.add_argument("--model", default="openai/clip-vit-base-patch32",
+ help="Embedding model to use")
+ parser.add_argument("--port", type=int, default=6263)
+ parser.add_argument("--no-browser", action="store_true")
+ parser.add_argument(
+ "--dataset-name",
+ default="nuscenes_mini",
+ help="HyperView dataset name. Use a unique name for isolated runs.",
+ )
+ parser.add_argument(
+ "--persist",
+ action=argparse.BooleanOptionalAction,
+ default=True,
+ help=(
+ "Persist dataset in LanceDB (--persist, default) or keep in-memory only (--no-persist). "
+ "Use --no-persist for a guaranteed fresh small run."
+ ),
+ )
+ parser.add_argument(
+ "--reuse-server",
+ action="store_true",
+ help="Attach to an existing HyperView server on --port if compatible.",
+ )
+ args = parser.parse_args()
+
+ # Check dataroot exists
+ if not os.path.isdir(os.path.join(args.dataroot, "v1.0-mini")):
+ print(f"ERROR: nuScenes mini not found at {args.dataroot}")
+ print("Download: wget https://www.nuscenes.org/data/v1.0-mini.tgz")
+ print(f"Extract: tar -xf v1.0-mini.tgz -C {args.dataroot}")
+ sys.exit(1)
+
+ dataset = hv.Dataset(args.dataset_name, persist=args.persist)
+
+ if args.persist and len(dataset) > 0:
+ print(
+ f"NOTE: dataset '{args.dataset_name}' already has {len(dataset)} samples. "
+ "Use --no-persist or a different --dataset-name for a fresh small run."
+ )
+
+ if args.mode in ("crops", "both"):
+ crops, nusc = load_nuscenes_crops(args.dataroot, max_crops=args.max_crops)
+
+ print(f"\nAdding {len(crops)} crops to HyperView...")
+ samples = []
+ for filepath, category, metadata, crop_id in crops:
+ sample = Sample(
+ id=f"crop_{crop_id}",
+ filepath=filepath,
+ label=category, # Full hierarchical label like "vehicle.car"
+ metadata=metadata,
+ )
+ samples.append(sample)
+
+ dataset._storage.add_samples_batch(samples)
+ print(f"Added {len(samples)} crop samples")
+
+ if args.mode in ("scenes", "both"):
+ images, nusc = load_nuscenes_scenes(args.dataroot)
+
+ print(f"\nAdding {len(images)} scene images to HyperView...")
+ samples = []
+ for filepath, label, metadata, sample_id in images:
+ sample = Sample(
+ id=f"scene_{sample_id}",
+ filepath=filepath,
+ label=label,
+ metadata=metadata,
+ )
+ samples.append(sample)
+
+ dataset._storage.add_samples_batch(samples)
+ print(f"Added {len(samples)} scene samples")
+
+ # Compute embeddings
+ print(f"\nComputing embeddings with {args.model}...")
+ space_key = dataset.compute_embeddings(model=args.model, show_progress=True)
+ print(f"Embeddings computed (space_key={space_key})")
+
+ # Compute both visualizations
+ print("\nComputing Euclidean projection...")
+ dataset.compute_visualization(space_key=space_key, geometry="euclidean")
+
+ print("Computing Poincaré projection...")
+ dataset.compute_visualization(space_key=space_key, geometry="poincare")
+
+ print("\nDone! Launching HyperView...")
+ hv.launch(
+ dataset,
+ port=args.port,
+ open_browser=not args.no_browser,
+ reuse_server=args.reuse_server,
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/src/hyperview/__init__.py b/src/hyperview/__init__.py
index 7def79f..d055599 100644
--- a/src/hyperview/__init__.py
+++ b/src/hyperview/__init__.py
@@ -1,6 +1,14 @@
"""HyperView - Open-source dataset curation with hyperbolic embeddings visualization."""
-from hyperview.api import Dataset, launch
+from . import _version as _version
+from . import api as _api
-__version__ = "0.1.0"
-__all__ = ["Dataset", "launch", "__version__"]
+Dataset = _api.Dataset
+launch = _api.launch
+__version__ = _version.__version__
+
+__all__ = [
+ "Dataset",
+ "launch",
+ "__version__",
+]
diff --git a/src/hyperview/api.py b/src/hyperview/api.py
index 928553b..c9a7e5b 100644
--- a/src/hyperview/api.py
+++ b/src/hyperview/api.py
@@ -1,46 +1,398 @@
"""Public API for HyperView."""
-from __future__ import annotations
-
+import json
+import os
+import socket
+import threading
+import time
import webbrowser
+from dataclasses import dataclass
+from urllib.error import URLError
+from urllib.request import Request, urlopen
+from uuid import uuid4
+
import uvicorn
from hyperview.core.dataset import Dataset
from hyperview.server.app import create_app, set_dataset
-__all__ = ["Dataset", "launch"]
+__all__ = ["Dataset", "launch", "Session"]
+
+
+@dataclass(frozen=True)
+class _HealthResponse:
+ name: str | None
+ session_id: str | None
+ dataset: str | None
+ pid: int | None
+
+
+def _can_connect(host: str, port: int, timeout_s: float) -> bool:
+ try:
+ with socket.create_connection((host, port), timeout=timeout_s):
+ return True
+ except OSError:
+ return False
+
+
+def _try_read_health(url: str, timeout_s: float) -> _HealthResponse | None:
+ try:
+ return _read_health(url, timeout_s=timeout_s)
+ except (URLError, TimeoutError, OSError, ValueError, json.JSONDecodeError):
+ return None
+
+
+def _read_health(url: str, timeout_s: float) -> _HealthResponse:
+ request = Request(url, headers={"Accept": "application/json"})
+ with urlopen(request, timeout=timeout_s) as response:
+ data = json.loads(response.read().decode("utf-8"))
+
+ return _HealthResponse(
+ name=data.get("name"),
+ session_id=data.get("session_id"),
+ dataset=data.get("dataset"),
+ pid=data.get("pid") if isinstance(data.get("pid"), int) else None,
+ )
+
+
+class Session:
+ """A session for the HyperView visualizer."""
+
+ def __init__(self, dataset: Dataset, host: str, port: int):
+ self.dataset = dataset
+ self.host = host
+ self.port = port
+ # Prefer a browser-connectable host for user-facing URLs.
+ # When binding to 0.0.0.0, users should connect via 127.0.0.1 locally.
+ self.url = f"http://{self._connect_host}:{port}"
+ self._server_thread: threading.Thread | None = None
+ self._server: uvicorn.Server | None = None
+ self._startup_error: BaseException | None = None
+ self.session_id = uuid4().hex
+
+ @property
+ def _connect_host(self) -> str:
+ return "127.0.0.1" if self.host == "0.0.0.0" else self.host
+
+ @property
+ def _health_url(self) -> str:
+ return f"http://{self._connect_host}:{self.port}/__hyperview__/health"
+
+ def _run_server(self):
+ try:
+ set_dataset(self.dataset)
+ app = create_app(self.dataset, session_id=self.session_id)
+ config = uvicorn.Config(app, host=self.host, port=self.port, log_level="warning")
+ self._server = uvicorn.Server(config)
+ self._server.run()
+ except BaseException as exc:
+ self._startup_error = exc
+
+ def start(self, background: bool = True):
+ """Start the visualizer server."""
+ if not background:
+ self._run_server()
+ return
+
+ # Fail fast if something is already listening on this port.
+ if _can_connect(self._connect_host, self.port, timeout_s=0.2):
+ health = _try_read_health(self._health_url, timeout_s=0.2)
+ if health is not None and health.name == "hyperview":
+ raise RuntimeError(
+ "HyperView failed to start because the port is already serving "
+ f"HyperView (port={self.port}, session_id={health.session_id}). "
+ "Choose a different port or stop the existing server."
+ )
+
+ raise RuntimeError(
+ "HyperView failed to start because the port is already in use "
+ f"by a non-HyperView service (port={self.port}). Choose a different "
+ "port or stop the process listening on that port."
+ )
+
+ self._startup_error = None
+ self._server_thread = threading.Thread(target=self._run_server, daemon=True)
+ self._server_thread.start()
+
+ deadline = time.time() + 5.0
+ last_health_error: Exception | None = None
+
+ while time.time() < deadline:
+ if self._startup_error is not None:
+ raise RuntimeError(
+ f"HyperView server failed to start (port={self.port}): "
+ f"{type(self._startup_error).__name__}: {self._startup_error}"
+ )
+
+ if self._server_thread is not None and not self._server_thread.is_alive():
+ raise RuntimeError(
+ "HyperView server thread exited during startup. "
+ f"The port may be in use (port={self.port})."
+ )
+
+ try:
+ health = _read_health(self._health_url, timeout_s=0.2)
+ except (URLError, TimeoutError, OSError, ValueError, json.JSONDecodeError) as exc:
+ last_health_error = exc
+ time.sleep(0.05)
+ continue
+
+ if health.name == "hyperview" and health.session_id == self.session_id:
+ return
+
+ if health.name == "hyperview":
+ raise RuntimeError(
+ "HyperView failed to start because the port is already serving "
+ f"a different HyperView session (port={self.port}, "
+ f"session_id={health.session_id})."
+ )
+
+ raise RuntimeError(
+ "HyperView failed to start because the port is already serving "
+ f"a non-HyperView app (port={self.port})."
+ )
+
+ raise TimeoutError(
+ "HyperView server did not become ready in time "
+ f"(port={self.port}). Last error: {last_health_error}"
+ )
+
+ def stop(self):
+ """Stop the visualizer server."""
+ if self._server:
+ self._server.should_exit = True
+
+ def show(self, height: int = 800):
+ """Display the visualizer in a notebook.
+
+ In Google Colab, notebook kernels cannot be accessed via localhost.
+ Colab exposes kernel ports through a proxy URL (see
+ `google.colab.kernel.proxyPort`). This renders a link to the proxied URL
+ that opens in a new tab.
+
+ In other notebook environments, it renders a clickable link to the local
+ URL and a best-effort JavaScript auto-open.
+ """
+ if _is_colab():
+ try:
+ from google.colab.output import eval_js # type: ignore[import-not-found]
+ from IPython.display import HTML, display
+
+ proxy_url = eval_js(f"google.colab.kernel.proxyPort({self.port})")
+ app_url = str(proxy_url).rstrip("/") + "/"
+
+ display(
+ HTML(
+ "HyperView is running in Colab. "
+                        f'<a href="{app_url}" target="_blank">'
+                        "Open HyperView in a new tab</a>.<br>"
+ )
+ )
+                display(HTML(f"<code>{app_url}</code><br>"))
+ return
+ except Exception:
+ # Fall through to the generic notebook behavior.
+ pass
+
+ # Default: open in a new browser tab (works well for Jupyter).
+ try:
+ from IPython.display import HTML, Javascript, display
+
+ display(
+ HTML(
+ "HyperView is running. "
+                    f'<a href="{self.url}" target="_blank">Open in a new tab</a>.'
+                    "<br>"
+ )
+ )
+
+ # Best-effort auto-open. Some browsers may block popups.
+ display(Javascript(f'window.open("{self.url}", "_blank");'))
+ except ImportError:
+ print(f"IPython not installed. Please visit {self.url} in your browser.")
+
+ def open_browser(self):
+ """Open the visualizer in a browser window."""
+ webbrowser.open(self.url)
def launch(
dataset: Dataset,
- port: int = 5151,
+ port: int = 6262,
host: str = "127.0.0.1",
open_browser: bool = True,
-) -> None:
+ notebook: bool | None = None,
+ height: int = 800,
+ reuse_server: bool = False,
+) -> Session:
"""Launch the HyperView visualization server.
+ Note:
+ HyperView's UI needs at least one 2D layout. If layouts are missing but
+ embedding spaces exist, this function will compute a default layout
+ automatically (Euclidean if any Euclidean space exists, otherwise Poincaré).
+
Args:
dataset: The dataset to visualize.
port: Port to run the server on.
host: Host to bind to.
open_browser: Whether to open a browser window.
+ notebook: Whether to display in a notebook. If None, auto-detects.
+ height: Height of the iframe in the notebook.
+ reuse_server: If True, and the requested port is already serving HyperView,
+ attach to the existing server instead of starting a new one. For safety,
+ this will only attach when the existing server reports the same dataset
+ name (via `/__hyperview__/health`).
+
+ Returns:
+ A Session object.
Example:
>>> import hyperview as hv
>>> dataset = hv.Dataset("my_dataset")
>>> dataset.add_images_dir("/path/to/images", label_from_folder=True)
- >>> dataset.compute_embeddings()
+ >>> dataset.compute_embeddings(model="openai/clip-vit-base-patch32")
>>> dataset.compute_visualization()
>>> hv.launch(dataset)
"""
- set_dataset(dataset)
- app = create_app(dataset)
+ if notebook is None:
+ # Colab is always a notebook environment, even if _is_notebook() fails to detect it
+ notebook = _is_notebook() or _is_colab()
+
+ if _is_colab() and host == "127.0.0.1":
+ # Colab port forwarding/proxying is most reliable when the server binds
+ # to all interfaces.
+ host = "0.0.0.0"
+
+ # Preflight: avoid doing expensive work if the port is already in use.
+ # If it's already serving HyperView and reuse_server=True, we can safely attach.
+ connect_host = "127.0.0.1" if host == "0.0.0.0" else host
+ health_url = f"http://{connect_host}:{port}/__hyperview__/health"
+
+ if _can_connect(connect_host, port, timeout_s=0.2):
+ health = _try_read_health(health_url, timeout_s=0.2)
+ if health is not None and health.name == "hyperview":
+ if not reuse_server:
+ raise RuntimeError(
+ "HyperView failed to start because the port is already serving "
+ f"HyperView (port={port}, dataset={health.dataset}, "
+ f"session_id={health.session_id}, pid={health.pid}). "
+ "Choose a different port, stop the existing server, or pass "
+ "reuse_server=True to attach."
+ )
+
+ if health.dataset is not None and health.dataset != dataset.name:
+ raise RuntimeError(
+ "HyperView refused to attach to the existing server because it is "
+ f"serving a different dataset (port={port}, dataset={health.dataset}). "
+ f"Requested dataset={dataset.name}. Stop the existing server or "
+ "choose a different port."
+ )
+
+ session = Session(dataset, host, port)
+ if health.session_id is not None:
+ session.session_id = health.session_id
+
+ if notebook:
+ if _is_colab():
+ print(
+ f"\nHyperView is already running (Colab, port={session.port}). "
+ "Use the link below to open it."
+ )
+ else:
+ print(
+ f"\nHyperView is already running at {session.url} (port={session.port}). "
+ "Opening a new tab..."
+ )
+ session.show(height=height)
+ else:
+ print(f"\nHyperView is already running at {session.url} (port={session.port}).")
+ if open_browser:
+ session.open_browser()
+
+ return session
+
+ raise RuntimeError(
+ "HyperView failed to start because the port is already in use "
+ f"by a non-HyperView service (port={port}). Choose a different "
+ "port or stop the process listening on that port."
+ )
+
+ # The frontend requires 2D coords from /api/embeddings.
+ # Ensure at least one layout exists; do not auto-generate optional geometries.
+ layouts = dataset.list_layouts()
+ spaces = dataset.list_spaces()
+
+ if not spaces:
+ raise ValueError(
+ "HyperView launch requires 2D projections for the UI. "
+ "No projections or embedding spaces were found. "
+ "Call `dataset.compute_embeddings()` and `dataset.compute_visualization()` "
+ "before `hv.launch()`."
+ )
+
+ if not layouts:
+ has_euclidean_space = any(s.geometry != "hyperboloid" for s in spaces)
+ default_geometry = "euclidean" if has_euclidean_space else "poincare"
+
+ print(f"No layouts found. Computing {default_geometry} visualization...")
+ # Let compute_visualization pick the most appropriate default space.
+ dataset.compute_visualization(space_key=None, geometry=default_geometry)
+
+ session = Session(dataset, host, port)
+
+ if notebook:
+ session.start(background=True)
+ if _is_colab():
+ print(
+ f"\nHyperView is running (Colab, port={session.port}). "
+ "Use the link below to open it."
+ )
+ else:
+ print(f"\nHyperView is running at {session.url}. Opening a new tab...")
+ session.show(height=height)
+ else:
+ session.start(background=True)
+        print(f"\nHyperView is running at {session.url}")
+        print("   Press Ctrl+C to stop.\n")
+
+ if open_browser:
+ session.open_browser()
+
+ try:
+ while True:
+ # Keep the main thread alive so the daemon server thread can run.
+ time.sleep(0.25)
+ if session._server_thread is not None and not session._server_thread.is_alive():
+ raise RuntimeError("HyperView server stopped unexpectedly.")
+ except KeyboardInterrupt:
+ pass
+ finally:
+ session.stop()
+ if session._server_thread is not None:
+ session._server_thread.join(timeout=2.0)
+
+ return session
+
+
+def _is_notebook() -> bool:
+ """Check if running in a notebook environment."""
+ try:
+ from IPython import get_ipython
+ except ImportError:
+ return False
+
+ shell = get_ipython()
+ return shell is not None and shell.__class__.__name__ == "ZMQInteractiveShell"
- url = f"http://{host}:{port}"
- print(f"\n🚀 HyperView is running at {url}")
- print(" Press Ctrl+C to stop.\n")
- if open_browser:
- webbrowser.open(url)
+def _is_colab() -> bool:
+ """Check if running inside a Google Colab notebook runtime."""
+ if os.environ.get("COLAB_RELEASE_TAG"):
+ return True
+ try:
+ import google.colab # type: ignore[import-not-found]
- uvicorn.run(app, host=host, port=port, log_level="warning")
+ return True
+ except ImportError:
+ return False
diff --git a/src/hyperview/cli.py b/src/hyperview/cli.py
index 9bcd459..f7cc77b 100644
--- a/src/hyperview/cli.py
+++ b/src/hyperview/cli.py
@@ -1,7 +1,5 @@
"""Command-line interface for HyperView."""
-from __future__ import annotations
-
import argparse
import sys
@@ -27,8 +25,33 @@ def main():
demo_parser.add_argument(
"--port",
type=int,
- default=5151,
- help="Port to run the server on (default: 5151)",
+ default=6262,
+ help="Port to run the server on (default: 6262)",
+ )
+ demo_parser.add_argument(
+ "--host",
+ type=str,
+ default="127.0.0.1",
+ help="Host to bind the server to (default: 127.0.0.1)",
+ )
+ demo_parser.add_argument(
+ "--no-browser",
+ action="store_true",
+ help="Do not open a browser window automatically",
+ )
+ demo_parser.add_argument(
+ "--reuse-server",
+ action="store_true",
+ help=(
+ "If the port is already serving HyperView, attach instead of failing. "
+ "For safety, this only attaches when the existing server reports the same dataset name."
+ ),
+ )
+ demo_parser.add_argument(
+ "--model",
+ type=str,
+ default="openai/clip-vit-base-patch32",
+ help="Embedding model to use (default: openai/clip-vit-base-patch32)",
)
# Serve command
@@ -37,60 +60,107 @@ def main():
serve_parser.add_argument(
"--port",
type=int,
- default=5151,
- help="Port to run the server on (default: 5151)",
+ default=6262,
+ help="Port to run the server on (default: 6262)",
+ )
+ serve_parser.add_argument(
+ "--host",
+ type=str,
+ default="127.0.0.1",
+ help="Host to bind the server to (default: 127.0.0.1)",
+ )
+ serve_parser.add_argument(
+ "--no-browser",
+ action="store_true",
+ help="Do not open a browser window automatically",
+ )
+ serve_parser.add_argument(
+ "--reuse-server",
+ action="store_true",
+ help=(
+ "If the port is already serving HyperView, attach instead of failing. "
+ "For safety, this only attaches when the existing server reports the same dataset name."
+ ),
)
args = parser.parse_args()
if args.command == "demo":
- run_demo(args.samples, args.port)
+ run_demo(
+ args.samples,
+ args.port,
+ host=args.host,
+ open_browser=not args.no_browser,
+ reuse_server=args.reuse_server,
+ model=args.model,
+ )
elif args.command == "serve":
- serve_dataset(args.dataset, args.port)
+ serve_dataset(
+ args.dataset,
+ args.port,
+ host=args.host,
+ open_browser=not args.no_browser,
+ reuse_server=args.reuse_server,
+ )
else:
parser.print_help()
sys.exit(1)
-def run_demo(num_samples: int = 500, port: int = 5151):
- """Run a demo with CIFAR-100 data."""
- print("🔄 Loading CIFAR-100 dataset...")
- dataset = Dataset("cifar100_demo")
-
- try:
- count = dataset.add_from_huggingface(
- "uoft-cs/cifar100",
- split="train",
- image_key="img",
- label_key="fine_label",
- max_samples=num_samples,
- )
- print(f"✓ Loaded {count} samples")
- except Exception as e:
- print(f"Failed to load HuggingFace dataset: {e}")
- print("Please ensure 'datasets' is installed: pip install datasets")
- sys.exit(1)
+def run_demo(
+ num_samples: int = 500,
+ port: int = 6262,
+ *,
+ host: str = "127.0.0.1",
+ open_browser: bool = True,
+ reuse_server: bool = False,
+ model: str = "openai/clip-vit-base-patch32",
+) -> None:
+ """Run a demo with CIFAR-10 data."""
+ print("Loading CIFAR-10 dataset...")
+ dataset = Dataset("cifar10_demo")
+
+ added, skipped = dataset.add_from_huggingface(
+ "uoft-cs/cifar10",
+ split="train",
+ image_key="img",
+ label_key="label",
+ max_samples=num_samples,
+ )
+ if skipped > 0:
+ print(f"Loaded {added} samples ({skipped} already present)")
+ else:
+ print(f"Loaded {added} samples")
- print("🔄 Computing embeddings...")
- dataset.compute_embeddings(show_progress=True)
- print("✓ Embeddings computed")
+ print(f"Computing embeddings with {model}...")
+ space_key = dataset.compute_embeddings(model=model, show_progress=True)
+ print("Embeddings computed")
- print("🔄 Computing visualizations...")
- dataset.compute_visualization()
- print("✓ Visualizations ready")
+ print("Computing visualizations...")
+ # Compute both euclidean and poincare layouts
+ dataset.compute_visualization(space_key=space_key, geometry="euclidean")
+ dataset.compute_visualization(space_key=space_key, geometry="poincare")
+ print("Visualizations ready")
- launch(dataset, port=port)
+ launch(dataset, port=port, host=host, open_browser=open_browser, reuse_server=reuse_server)
-def serve_dataset(filepath: str, port: int = 5151):
+def serve_dataset(
+ filepath: str,
+ port: int = 6262,
+ *,
+ host: str = "127.0.0.1",
+ open_browser: bool = True,
+ reuse_server: bool = False,
+) -> None:
"""Serve a saved dataset."""
from hyperview import Dataset, launch
- print(f"🔄 Loading dataset from {filepath}...")
+ print(f"Loading dataset from {filepath}...")
dataset = Dataset.load(filepath)
- print(f"✓ Loaded {len(dataset)} samples")
+ print(f"Loaded {len(dataset)} samples")
- launch(dataset, port=port)
+ launch(dataset, port=port, host=host, open_browser=open_browser, reuse_server=reuse_server)
if __name__ == "__main__":
diff --git a/src/hyperview/core/dataset.py b/src/hyperview/core/dataset.py
index 44cafaf..09cba36 100644
--- a/src/hyperview/core/dataset.py
+++ b/src/hyperview/core/dataset.py
@@ -7,44 +7,115 @@
import uuid
from collections.abc import Callable, Iterator
from pathlib import Path
-from typing import Any
+from typing import Any, cast
import numpy as np
-from datasets import load_dataset
+from datasets import DownloadConfig, load_dataset
from PIL import Image
-from hyperview.core.sample import Sample, SampleFromArray
+from hyperview.core.sample import Sample
+from hyperview.storage.backend import StorageBackend
+from hyperview.storage.schema import make_layout_key
class Dataset:
- """A collection of samples with support for embeddings and visualization."""
+ """A collection of samples with support for embeddings and visualization.
- def __init__(self, name: str | None = None):
+ Datasets are automatically persisted to LanceDB by default, providing:
+ - Automatic persistence (no need to call save())
+ - Vector similarity search
+ - Efficient storage and retrieval
+
+ Embeddings are stored separately from samples, keyed by model_id.
+ Layouts (2D projections) are stored per layout_key (space + method).
+
+ Examples:
+ # Create a new dataset (auto-persisted)
+ dataset = hv.Dataset("my_dataset")
+ dataset.add_images_dir("/path/to/images")
+
+ # Create an in-memory dataset (for testing)
+ dataset = hv.Dataset("temp", persist=False)
+ """
+
+ def __init__(
+ self,
+ name: str | None = None,
+ persist: bool = True,
+ storage: StorageBackend | None = None,
+ ):
"""Initialize a new dataset.
Args:
name: Optional name for the dataset.
+ persist: If True (default), use LanceDB for persistence.
+ If False, use in-memory storage.
+ storage: Optional custom storage backend. If provided, persist is ignored.
"""
self.name = name or f"dataset_{uuid.uuid4().hex[:8]}"
- self._samples: dict[str, Sample] = {}
- self._embedding_computer = None
- self._projection_engine = None
- self._label_colors: dict[str, str] = {}
+
+ # Initialize storage backend
+ if storage is not None:
+ self._storage = storage
+ elif persist:
+ from hyperview.storage import LanceDBBackend, StorageConfig
+
+ config = StorageConfig.default()
+ self._storage = LanceDBBackend(self.name, config)
+ else:
+ from hyperview.storage import MemoryBackend
+ self._storage = MemoryBackend(self.name)
def __len__(self) -> int:
- return len(self._samples)
+ return len(self._storage)
def __iter__(self) -> Iterator[Sample]:
- return iter(self._samples.values())
+ return iter(self._storage)
def __getitem__(self, sample_id: str) -> Sample:
- return self._samples[sample_id]
+ sample = self._storage.get_sample(sample_id)
+ if sample is None:
+ raise KeyError(sample_id)
+ return sample
def add_sample(self, sample: Sample) -> None:
- """Add a sample to the dataset."""
- self._samples[sample.id] = sample
- if sample.label and sample.label not in self._label_colors:
- self._assign_label_color(sample.label)
+ """Add a sample to the dataset (idempotent)."""
+ self._storage.add_sample(sample)
+
+ def _ingest_samples(
+ self,
+ samples: list[Sample],
+ *,
+ skip_existing: bool = True,
+ ) -> tuple[int, int]:
+ """Shared ingestion helper for batch sample insertion.
+
+ Handles deduplication uniformly.
+
+ Args:
+ samples: List of samples to ingest.
+ skip_existing: If True, skip samples that already exist in storage.
+
+ Returns:
+ Tuple of (num_added, num_skipped).
+ """
+ if not samples:
+ return 0, 0
+
+ skipped = 0
+ if skip_existing:
+ all_ids = [s.id for s in samples]
+ existing_ids = self._storage.get_existing_ids(all_ids)
+ if existing_ids:
+ samples = [s for s in samples if s.id not in existing_ids]
+ skipped = len(all_ids) - len(samples)
+
+ if not samples:
+ return 0, skipped
+
+ self._storage.add_samples_batch(samples)
+
+ return len(samples), skipped
def add_image(
self,
@@ -82,7 +153,8 @@ def add_images_dir(
extensions: tuple[str, ...] = (".jpg", ".jpeg", ".png", ".webp"),
label_from_folder: bool = False,
recursive: bool = True,
- ) -> int:
+ skip_existing: bool = True,
+ ) -> tuple[int, int]:
"""Add all images from a directory.
Args:
@@ -90,24 +162,32 @@ def add_images_dir(
extensions: Tuple of valid file extensions.
label_from_folder: If True, use parent folder name as label.
recursive: If True, search subdirectories.
+ skip_existing: If True (default), skip samples that already exist.
Returns:
- Number of images added.
+ Tuple of (num_added, num_skipped).
"""
- directory = Path(directory)
- if not directory.exists():
- raise ValueError(f"Directory does not exist: {directory}")
+ directory_path = Path(directory)
+ if not directory_path.exists():
+ raise ValueError(f"Directory does not exist: {directory_path}")
- count = 0
+ samples = []
pattern = "**/*" if recursive else "*"
- for path in directory.glob(pattern):
+ for path in directory_path.glob(pattern):
if path.is_file() and path.suffix.lower() in extensions:
label = path.parent.name if label_from_folder else None
- self.add_image(str(path), label=label)
- count += 1
-
- return count
+ sample_id = hashlib.md5(str(path).encode()).hexdigest()[:12]
+ sample = Sample(
+ id=sample_id,
+ filepath=str(path),
+ label=label,
+ metadata={},
+ )
+ samples.append(sample)
+
+ # Use shared ingestion helper
+ return self._ingest_samples(samples, skip_existing=skip_existing)
def add_from_huggingface(
self,
@@ -117,9 +197,18 @@ def add_from_huggingface(
label_key: str | None = "fine_label",
label_names_key: str | None = None,
max_samples: int | None = None,
- ) -> int:
+ shuffle: bool = False,
+ seed: int = 42,
+ show_progress: bool = True,
+ skip_existing: bool = True,
+ image_format: str = "auto",
+ ) -> tuple[int, int]:
"""Load samples from a HuggingFace dataset.
+ Images are downloaded to disk at ~/.hyperview/media/huggingface/{dataset}/{split}/
+ This ensures images persist across sessions and embeddings can be computed
+ at any time, similar to FiftyOne's approach.
+
Args:
dataset_name: Name of the HuggingFace dataset.
split: Dataset split to use.
@@ -127,33 +216,85 @@ def add_from_huggingface(
label_key: Key for the label column (can be None).
label_names_key: Key for label names in dataset info.
max_samples: Maximum number of samples to load.
+ shuffle: If True, shuffle the dataset before sampling (ensures diverse classes).
+ seed: Random seed for shuffling (default: 42).
+ show_progress: Whether to print progress.
+ skip_existing: If True (default), skip samples that already exist in storage.
+ image_format: Image format to save: "auto" (detect from source, fallback PNG),
+ "png" (lossless), or "jpeg" (smaller files).
Returns:
- Number of samples added.
+ Tuple of (num_added, num_skipped).
"""
- ds = load_dataset(dataset_name, split=split)
+ from hyperview.storage import StorageConfig
+
+ # HuggingFace `load_dataset()` can be surprisingly slow even when the dataset
+ # is already cached, due to Hub reachability checks in some environments.
+ # For a fast path, first try loading in "offline" mode (cache-only), and
+ # fall back to an online load if the dataset isn't cached yet.
+ try:
+ ds = cast(
+ Any,
+ load_dataset(
+ dataset_name,
+ split=split,
+ download_config=DownloadConfig(local_files_only=True),
+ ),
+ )
+ except Exception:
+ ds = cast(Any, load_dataset(dataset_name, split=split))
+
+ source_fingerprint = ds._fingerprint if hasattr(ds, "_fingerprint") else None
+
+ dataset_size = len(ds)
+ total = dataset_size if max_samples is None else min(dataset_size, max_samples)
+
+ # Select source row indices explicitly so sampled subsets are clear and
+ # sample IDs remain stable for the same underlying row.
+ selected_indices: list[int] | None = None
+ if shuffle:
+ rng = np.random.default_rng(seed)
+ selected_indices = rng.permutation(dataset_size)[:total].tolist()
+ ds = ds.select(selected_indices)
+ elif max_samples is not None:
+ selected_indices = list(range(total))
+ ds = ds.select(selected_indices)
# Get label names if available
label_names = None
if label_key and label_names_key:
if label_names_key in ds.features:
- label_names = ds.features[label_names_key].names
+ label_names = ds.features[label_names_key].names
elif label_key:
if hasattr(ds.features[label_key], "names"):
label_names = ds.features[label_key].names
- count = 0
- total = len(ds) if max_samples is None else min(len(ds), max_samples)
+ # Extract dataset metadata for robust sample IDs
+ config_name = getattr(ds.info, "config_name", None) or "default"
+ fingerprint = source_fingerprint[:8] if source_fingerprint else "unknown"
+ version = str(ds.info.version) if ds.info.version else None
+
+ # Get media directory for this dataset
+ config = StorageConfig.default()
+ media_dir = config.get_huggingface_media_dir(dataset_name, split)
+
+ samples = []
+
+ if show_progress:
+ print(f"Loading {total} samples from {dataset_name}...")
+
+ iterator = range(total)
- for i in range(total):
+ for i in iterator:
item = ds[i]
+ source_index = selected_indices[i] if selected_indices is not None else i
image = item[image_key]
# Handle PIL Image or numpy array
if isinstance(image, Image.Image):
- image_array = np.array(image)
+ pil_image = image
else:
- image_array = image
+ pil_image = Image.fromarray(np.asarray(image))
# Get label
label = None
@@ -164,179 +305,397 @@ def add_from_huggingface(
else:
label = str(label_idx)
- sample = SampleFromArray.from_array(
- id=f"{dataset_name.replace('/', '_')}_{split}_{i}",
- image_array=image_array,
+ # Generate robust sample ID with config and fingerprint
+ safe_name = dataset_name.replace("/", "_")
+ sample_id = f"{safe_name}_{config_name}_{fingerprint}_{split}_{source_index}"
+
+ # Determine image format and extension
+ if image_format == "auto":
+ # Try to preserve the original format; fall back to PNG
+ original_format = getattr(pil_image, "format", None)
+ if original_format in ("JPEG", "JPG"):
+ save_format = "JPEG"
+ ext = ".jpg"
+ else:
+ save_format = "PNG"
+ ext = ".png"
+ elif image_format == "jpeg":
+ save_format = "JPEG"
+ ext = ".jpg"
+ else:
+ save_format = "PNG"
+ ext = ".png"
+
+ # Enhanced metadata with dataset info
+ metadata = {
+ "source": dataset_name,
+ "config": config_name,
+ "split": split,
+ "index": source_index,
+ "fingerprint": source_fingerprint,
+ "version": version,
+ }
+
+ image_path = media_dir / f"{sample_id}{ext}"
+ if not image_path.exists():
+ if save_format == "JPEG" or pil_image.mode in ("RGBA", "P", "L"):
+ pil_image = pil_image.convert("RGB")
+ pil_image.save(image_path, format=save_format)
+
+ sample = Sample(
+ id=sample_id,
+ filepath=str(image_path),
label=label,
- metadata={"source": dataset_name, "split": split, "index": i},
+ metadata=metadata,
)
- self.add_sample(sample)
- count += 1
- return count
+ samples.append(sample)
+
+ # Use shared ingestion helper
+ num_added, skipped = self._ingest_samples(samples, skip_existing=skip_existing)
+
+ if show_progress:
+ print(f"Images saved to: {media_dir}")
+ if skipped > 0:
+ print(f"Skipped {skipped} existing samples")
+
+ return num_added, skipped
def compute_embeddings(
self,
- model: str = "clip",
+ model: str,
+ *,
+ provider: str | None = None,
+ checkpoint: str | None = None,
batch_size: int = 32,
show_progress: bool = True,
- ) -> None:
- """Compute embeddings for all samples.
+ **provider_kwargs: Any,
+ ) -> str:
+ """Compute embeddings for samples that don't have them yet.
+
+ Embeddings are stored in a dedicated space keyed by the embedding spec.
Args:
- model: Embedding model to use.
+ model: Model identifier (required). Use a HuggingFace model_id
+ (e.g. 'openai/clip-vit-base-patch32') for embed-anything, or a
+ hyper-models name (e.g. 'hycoclip-vit-s') for hyperbolic embeddings.
+ provider: Explicit provider identifier. If not specified, auto-detected:
+ 'hyper-models' if model matches a hyper-models name, else 'embed-anything'.
+ Available providers: `hyperview.embeddings.list_embedding_providers()`.
+ checkpoint: Checkpoint path/URL (hf://... or local path) for weight-only models.
batch_size: Batch size for processing.
show_progress: Whether to show progress bar.
- """
- from hyperview.embeddings.compute import EmbeddingComputer
+ **provider_kwargs: Additional kwargs passed to the embedding function.
- if self._embedding_computer is None:
- self._embedding_computer = EmbeddingComputer(model=model)
+ Returns:
+ space_key for the embedding space.
+
+ Raises:
+ ValueError: If model is not provided.
+ """
+ if not model:
+ raise ValueError(
+ "model is required. Examples: 'openai/clip-vit-base-patch32' (CLIP), "
+ "'hycoclip-vit-s' (hyperbolic). See hyperview.embeddings.list_embedding_providers()."
+ )
- samples = list(self._samples.values())
- embeddings = self._embedding_computer.compute_batch(
- samples, batch_size=batch_size, show_progress=show_progress
+ from hyperview.embeddings.engine import EmbeddingSpec
+ from hyperview.embeddings.pipelines import compute_embeddings
+
+ if provider is None:
+ provider = "embed-anything"
+ try:
+ import hyper_models
+ if model in hyper_models.list_models():
+ provider = "hyper-models"
+ except ImportError:
+ pass
+ spec = EmbeddingSpec(
+ provider=provider,
+ model_id=model,
+ checkpoint=checkpoint,
+ provider_kwargs=provider_kwargs,
)
- for sample, embedding in zip(samples, embeddings):
- sample.embedding = embedding.tolist()
+ space_key, _num_computed, _num_skipped = compute_embeddings(
+ storage=self._storage,
+ spec=spec,
+ batch_size=batch_size,
+ show_progress=show_progress,
+ )
+ return space_key
def compute_visualization(
self,
+ space_key: str | None = None,
method: str = "umap",
+ geometry: str = "euclidean",
n_neighbors: int = 15,
min_dist: float = 0.1,
metric: str = "cosine",
- ) -> None:
+ force: bool = False,
+ ) -> str:
"""Compute 2D projections for visualization.
Args:
+ space_key: Embedding space to project. If None, uses the first available.
method: Projection method ('umap' supported).
+ geometry: Output geometry type ('euclidean' or 'poincare').
n_neighbors: Number of neighbors for UMAP.
min_dist: Minimum distance for UMAP.
metric: Distance metric for UMAP.
- """
- from hyperview.embeddings.projection import ProjectionEngine
-
- if self._projection_engine is None:
- self._projection_engine = ProjectionEngine()
+ force: Force recomputation even if layout exists.
- samples = [s for s in self._samples.values() if s.embedding is not None]
- if not samples:
- raise ValueError("No embeddings computed. Call compute_embeddings() first.")
-
- embeddings = np.array([s.embedding for s in samples])
+ Returns:
+ layout_key for the computed layout.
+ """
+ from hyperview.embeddings.pipelines import compute_layout
- # Compute Euclidean 2D projection
- coords_euclidean = self._projection_engine.project_umap(
- embeddings,
+ return compute_layout(
+ storage=self._storage,
+ space_key=space_key,
+ method=method,
+ geometry=geometry,
n_neighbors=n_neighbors,
min_dist=min_dist,
metric=metric,
+ force=force,
+ show_progress=True,
)
- # Compute Hyperbolic (Poincaré) 2D projection
- coords_hyperbolic = self._projection_engine.project_to_poincare(
- embeddings,
- n_neighbors=n_neighbors,
- min_dist=min_dist,
+ def list_spaces(self) -> list[Any]:
+ """List all embedding spaces in this dataset."""
+ return self._storage.list_spaces()
+
+ def list_layouts(self) -> list[Any]:
+ """List all layouts in this dataset (returns LayoutInfo objects)."""
+ return self._storage.list_layouts()
+
+ def find_similar(
+ self,
+ sample_id: str,
+ k: int = 10,
+ space_key: str | None = None,
+ ) -> list[tuple[Sample, float]]:
+ """Find k most similar samples to a given sample.
+
+ Args:
+ sample_id: ID of the query sample.
+ k: Number of neighbors to return.
+ space_key: Embedding space to search in. If None, uses first available.
+
+ Returns:
+ List of (sample, distance) tuples, sorted by distance ascending.
+ """
+ return self._storage.find_similar(sample_id, k, space_key)
+
+ def find_similar_by_vector(
+ self,
+ vector: list[float],
+ k: int = 10,
+ space_key: str | None = None,
+ ) -> list[tuple[Sample, float]]:
+ """Find k most similar samples to a given vector.
+
+ Args:
+ vector: Query vector.
+ k: Number of neighbors to return.
+ space_key: Embedding space to search in. If None, uses first available.
+
+ Returns:
+ List of (sample, distance) tuples, sorted by distance ascending.
+ """
+ return self._storage.find_similar_by_vector(vector, k, space_key)
+
+ def set_coords(
+ self,
+ geometry: str,
+ ids: list[str],
+ coords: np.ndarray | list[list[float]],
+ ) -> str:
+ """Set precomputed 2D coordinates for visualization.
+
+ Use this when you have precomputed 2D projections and want to skip
+ embedding computation. Useful for smoke tests or external projections.
+
+ Args:
+ geometry: "euclidean" or "poincare".
+ ids: List of sample IDs.
+ coords: (N, 2) array of coordinates.
+
+ Returns:
+ The layout_key for the stored coordinates.
+
+ Example:
+ >>> dataset.set_coords("euclidean", ["s0", "s1"], [[0.1, 0.2], [0.3, 0.4]])
+ >>> dataset.set_coords("poincare", ["s0", "s1"], [[0.1, 0.2], [0.3, 0.4]])
+ >>> hv.launch(dataset)
+ """
+ if geometry not in ("euclidean", "poincare"):
+ raise ValueError(f"geometry must be 'euclidean' or 'poincare', got '{geometry}'")
+
+ coords_arr = np.asarray(coords, dtype=np.float32)
+ if coords_arr.ndim != 2 or coords_arr.shape[1] != 2:
+ raise ValueError(f"coords must be (N, 2), got shape {coords_arr.shape}")
+
+ # Ensure a synthetic space exists (required by launch())
+ space_key = "precomputed"
+ if not any(s.space_key == space_key for s in self._storage.list_spaces()):
+ precomputed_config = {
+ "provider": "precomputed",
+ "geometry": "unknown", # Precomputed coords don't have a source embedding geometry
+ }
+ self._storage.ensure_space(space_key, dim=2, config=precomputed_config)
+
+ layout_key = make_layout_key(space_key, method="precomputed", geometry=geometry)
+
+ # Ensure layout registry entry exists
+ self._storage.ensure_layout(
+ layout_key=layout_key,
+ space_key=space_key,
+ method="precomputed",
+ geometry=geometry,
+ params=None,
)
- for sample, coord_e, coord_h in zip(samples, coords_euclidean, coords_hyperbolic):
- sample.embedding_2d = coord_e.tolist()
- sample.embedding_2d_hyperbolic = coord_h.tolist()
-
- def _assign_label_color(self, label: str) -> None:
- """Assign a color to a label."""
- # Use a predefined color palette
- colors = [
- "#e6194b", "#3cb44b", "#ffe119", "#4363d8", "#f58231",
- "#911eb4", "#46f0f0", "#f032e6", "#bcf60c", "#fabebe",
- "#008080", "#e6beff", "#9a6324", "#fffac8", "#800000",
- "#aaffc3", "#808000", "#ffd8b1", "#000075", "#808080",
- ]
- idx = len(self._label_colors) % len(colors)
- self._label_colors[label] = colors[idx]
-
- def get_label_colors(self) -> dict[str, str]:
- """Get the color mapping for labels."""
- return self._label_colors.copy()
+ self._storage.add_layout_coords(layout_key, list(ids), coords_arr)
+ return layout_key
@property
def samples(self) -> list[Sample]:
"""Get all samples as a list."""
- return list(self._samples.values())
+ return self._storage.get_all_samples()
@property
def labels(self) -> list[str]:
"""Get unique labels in the dataset."""
- return list(set(s.label for s in self._samples.values() if s.label))
+ return self._storage.get_unique_labels()
def filter(self, predicate: Callable[[Sample], bool]) -> list[Sample]:
"""Filter samples based on a predicate function."""
- return [s for s in self._samples.values() if predicate(s)]
+ return self._storage.filter(predicate)
- def to_dict(self) -> dict[str, Any]:
- """Convert dataset to dictionary for serialization."""
- return {
- "name": self.name,
- "num_samples": len(self),
- "labels": self.labels,
- "label_colors": self._label_colors,
- }
+ def get_samples_paginated(
+ self,
+ offset: int = 0,
+ limit: int = 100,
+ label: str | None = None,
+ ) -> tuple[list[Sample], int]:
+ """Get paginated samples.
+
+ This avoids loading all samples into memory and is used by the server
+ API for efficient pagination.
+ """
+ return self._storage.get_samples_paginated(offset=offset, limit=limit, label=label)
+
+ def get_samples_by_ids(self, sample_ids: list[str]) -> list[Sample]:
+ """Retrieve multiple samples by ID.
+
+ The returned list is aligned to the input order and skips missing IDs.
+ """
+ return self._storage.get_samples_by_ids(sample_ids)
+
+ def get_visualization_data(
+ self,
+ layout_key: str,
+ ) -> tuple[list[str], list[str | None], np.ndarray]:
+ """Get visualization data (ids, labels, coords) for a layout."""
+ layout_ids, layout_coords = self._storage.get_layout_coords(layout_key)
+ if not layout_ids:
+ return [], [], np.empty((0, 2), dtype=np.float32)
+
+ labels_by_id = self._storage.get_labels_by_ids(layout_ids)
+
+ ids: list[str] = []
+ labels: list[str | None] = []
+ coords: list[np.ndarray] = []
+
+ for i, sample_id in enumerate(layout_ids):
+ if sample_id in labels_by_id:
+ ids.append(sample_id)
+ labels.append(labels_by_id[sample_id])
+ coords.append(layout_coords[i])
+
+ if not coords:
+ return [], [], np.empty((0, 2), dtype=np.float32)
+
+ return ids, labels, np.asarray(coords, dtype=np.float32)
+
+
+ def get_lasso_candidates_aabb(
+ self,
+ *,
+ layout_key: str,
+ x_min: float,
+ x_max: float,
+ y_min: float,
+ y_max: float,
+ ) -> tuple[list[str], np.ndarray]:
+ """Return candidate (id, xy) rows within an AABB for a layout."""
+ return self._storage.get_lasso_candidates_aabb(
+ layout_key=layout_key,
+ x_min=x_min,
+ x_max=x_max,
+ y_min=y_min,
+ y_max=y_max,
+ )
def save(self, filepath: str, include_thumbnails: bool = True) -> None:
- """Save dataset to a JSON file.
+ """Export dataset to a JSON file.
Args:
filepath: Path to save the JSON file.
include_thumbnails: Whether to include cached thumbnails.
"""
- # Cache thumbnails before saving if requested
+ samples = self._storage.get_all_samples()
if include_thumbnails:
- for s in self._samples.values():
+ for s in samples:
s.cache_thumbnail()
data = {
"name": self.name,
- "label_colors": self._label_colors,
"samples": [
{
"id": s.id,
"filepath": s.filepath,
"label": s.label,
"metadata": s.metadata,
- "embedding": s.embedding,
- "embedding_2d": s.embedding_2d,
- "embedding_2d_hyperbolic": s.embedding_2d_hyperbolic,
"thumbnail_base64": s.thumbnail_base64 if include_thumbnails else None,
}
- for s in self._samples.values()
+ for s in samples
],
}
with open(filepath, "w") as f:
json.dump(data, f)
@classmethod
- def load(cls, filepath: str) -> Dataset:
- """Load dataset from a JSON file."""
+ def load(cls, filepath: str, persist: bool = False) -> "Dataset":
+ """Load dataset from a JSON file.
+
+ Args:
+ filepath: Path to the JSON file.
+ persist: If True, persist the loaded data to LanceDB.
+ If False (default), keep in memory only.
+
+ Returns:
+ Dataset instance.
+ """
with open(filepath) as f:
data = json.load(f)
- dataset = cls(name=data["name"])
- dataset._label_colors = data.get("label_colors", {})
+ dataset = cls(name=data["name"], persist=persist)
+ # Add samples
+ samples = []
for s_data in data["samples"]:
sample = Sample(
id=s_data["id"],
filepath=s_data["filepath"],
label=s_data.get("label"),
metadata=s_data.get("metadata", {}),
- embedding=s_data.get("embedding"),
- embedding_2d=s_data.get("embedding_2d"),
- embedding_2d_hyperbolic=s_data.get("embedding_2d_hyperbolic"),
thumbnail_base64=s_data.get("thumbnail_base64"),
)
- dataset.add_sample(sample)
+ samples.append(sample)
+ dataset._storage.add_samples_batch(samples)
return dataset
diff --git a/src/hyperview/core/sample.py b/src/hyperview/core/sample.py
index 97b0dd6..d0e3f38 100644
--- a/src/hyperview/core/sample.py
+++ b/src/hyperview/core/sample.py
@@ -1,30 +1,28 @@
"""Sample class representing a single data point in a dataset."""
-from __future__ import annotations
-
import base64
import io
from pathlib import Path
from typing import Any
-import numpy as np
from PIL import Image
from pydantic import BaseModel, Field
class Sample(BaseModel):
- """A single sample in a HyperView dataset."""
+ """A single sample in a HyperView dataset.
+
+ Samples are pure metadata containers. Embeddings and layouts are stored
+ separately in dedicated tables (per embedding space / per layout).
+ """
id: str = Field(..., description="Unique identifier for the sample")
filepath: str = Field(..., description="Path to the image file")
label: str | None = Field(default=None, description="Label for the sample")
metadata: dict[str, Any] = Field(default_factory=dict, description="Additional metadata")
- embedding: list[float] | None = Field(default=None, description="High-dimensional embedding")
- embedding_2d: list[float] | None = Field(default=None, description="2D projected embedding")
- embedding_2d_hyperbolic: list[float] | None = Field(
- default=None, description="2D hyperbolic (Poincaré) embedding"
- )
thumbnail_base64: str | None = Field(default=None, description="Cached thumbnail as base64")
+ width: int | None = Field(default=None, description="Image width in pixels")
+ height: int | None = Field(default=None, description="Image height in pixels")
model_config = {"arbitrary_types_allowed": True}
@@ -38,78 +36,60 @@ def load_image(self) -> Image.Image:
return Image.open(self.filepath)
def get_thumbnail(self, size: tuple[int, int] = (128, 128)) -> Image.Image:
- """Get a thumbnail of the image."""
+ """Get a thumbnail of the image. Also captures original dimensions."""
img = self.load_image()
+ # Capture original dimensions while we have the image loaded
+ if self.width is None or self.height is None:
+ self.width, self.height = img.size
img.thumbnail(size, Image.Resampling.LANCZOS)
return img
- def get_thumbnail_base64(self, size: tuple[int, int] = (128, 128)) -> str:
- """Get thumbnail as base64 encoded string."""
- # Return cached thumbnail if available
- if self.thumbnail_base64:
- return self.thumbnail_base64
-
+ def _encode_thumbnail(self, size: tuple[int, int] = (128, 128)) -> str:
+ """Encode thumbnail as base64 JPEG."""
thumb = self.get_thumbnail(size)
- # Convert to RGB if necessary (for PNG with alpha)
if thumb.mode in ("RGBA", "P"):
thumb = thumb.convert("RGB")
buffer = io.BytesIO()
thumb.save(buffer, format="JPEG", quality=85)
return base64.b64encode(buffer.getvalue()).decode("utf-8")
+ def get_thumbnail_base64(self, size: tuple[int, int] = (128, 128)) -> str:
+ """Get thumbnail as base64 encoded string."""
+ return self.thumbnail_base64 or self._encode_thumbnail(size)
+
def cache_thumbnail(self, size: tuple[int, int] = (128, 128)) -> None:
"""Cache the thumbnail as base64 for persistence."""
if self.thumbnail_base64 is None:
- thumb = self.get_thumbnail(size)
- if thumb.mode in ("RGBA", "P"):
- thumb = thumb.convert("RGB")
- buffer = io.BytesIO()
- thumb.save(buffer, format="JPEG", quality=85)
- self.thumbnail_base64 = base64.b64encode(buffer.getvalue()).decode("utf-8")
+ self.thumbnail_base64 = self._encode_thumbnail(size)
def to_api_dict(self, include_thumbnail: bool = True) -> dict[str, Any]:
"""Convert to dictionary for API response."""
+ # Ensure dimensions are populated (loads image if needed but not cached)
+ if self.width is None or self.height is None:
+ self.ensure_dimensions()
+
data = {
"id": self.id,
"filepath": self.filepath,
"filename": self.filename,
"label": self.label,
"metadata": self.metadata,
+ "width": self.width,
+ "height": self.height,
}
if include_thumbnail:
data["thumbnail"] = self.get_thumbnail_base64()
- if self.embedding_2d:
- data["embedding_2d"] = self.embedding_2d
- if self.embedding_2d_hyperbolic:
- data["embedding_2d_hyperbolic"] = self.embedding_2d_hyperbolic
return data
+ def ensure_dimensions(self) -> None:
+ """Load image dimensions if not already set."""
+ if self.width is None or self.height is None:
+ try:
+ img = self.load_image()
+ self.width, self.height = img.size
+ except Exception:
+ # If image can't be loaded, leave as None
+ pass
+
-class SampleFromArray(Sample):
- """A sample created from a numpy array (e.g., from HuggingFace datasets)."""
-
- _image_array: np.ndarray | None = None
-
- @classmethod
- def from_array(
- cls,
- id: str,
- image_array: np.ndarray,
- label: str | None = None,
- metadata: dict[str, Any] | None = None,
- ) -> SampleFromArray:
- """Create a sample from a numpy array."""
- sample = cls(
- id=id,
- filepath=f"memory://{id}",
- label=label,
- metadata=metadata or {},
- )
- sample._image_array = image_array
- return sample
- def load_image(self) -> Image.Image:
- """Load the image from the array."""
- if self._image_array is not None:
- return Image.fromarray(self._image_array)
- return super().load_image()
diff --git a/src/hyperview/core/selection.py b/src/hyperview/core/selection.py
new file mode 100644
index 0000000..1dee065
--- /dev/null
+++ b/src/hyperview/core/selection.py
@@ -0,0 +1,53 @@
+"""Selection / geometry helpers.
+
+This module contains small, backend-agnostic utilities used by selection endpoints
+(e.g. lasso selection over 2D embeddings).
+"""
+
+from __future__ import annotations
+
+import numpy as np
+
+
+def points_in_polygon(points_xy: np.ndarray, polygon_xy: np.ndarray) -> np.ndarray:
+ """Vectorized point-in-polygon (even-odd rule / ray casting).
+
+ Args:
+ points_xy: Array of shape (m, 2) with point coordinates.
+ polygon_xy: Array of shape (n, 2) with polygon vertices.
+
+ Returns:
+ Boolean mask of length m, True where point lies inside polygon.
+
+ Notes:
+ Points exactly on the boundary may be classified as inside or outside
+ depending on floating-point rounding (acceptable for lasso selection).
+ """
+ if polygon_xy.shape[0] < 3:
+ return np.zeros((points_xy.shape[0],), dtype=bool)
+
+ x = points_xy[:, 0]
+ y = points_xy[:, 1]
+ poly_x = polygon_xy[:, 0]
+ poly_y = polygon_xy[:, 1]
+
+ inside = np.zeros((points_xy.shape[0],), dtype=bool)
+ j = polygon_xy.shape[0] - 1
+
+ for i in range(polygon_xy.shape[0]):
+ xi = poly_x[i]
+ yi = poly_y[i]
+ xj = poly_x[j]
+ yj = poly_y[j]
+
+ # Half-open y-interval to avoid double-counting vertices.
+ intersects = (yi > y) != (yj > y)
+
+ denom = yj - yi
+ # denom == 0 => intersects is always False; add tiny epsilon to avoid warnings.
+ x_intersect = (xj - xi) * (y - yi) / (denom + 1e-30) + xi
+
+ inside ^= intersects & (x < x_intersect)
+ j = i
+
+ return inside
diff --git a/src/hyperview/embeddings/__init__.py b/src/hyperview/embeddings/__init__.py
index 2d83dcc..24ba7aa 100644
--- a/src/hyperview/embeddings/__init__.py
+++ b/src/hyperview/embeddings/__init__.py
@@ -1,6 +1,31 @@
-"""Embedding computation and projection modules."""
+"""Embedding computation and projection."""
from hyperview.embeddings.compute import EmbeddingComputer
-from hyperview.embeddings.projection import ProjectionEngine
+from hyperview.embeddings.engine import (
+ EmbeddingSpec,
+ get_engine,
+ get_provider_info,
+ list_embedding_providers,
+)
-__all__ = ["EmbeddingComputer", "ProjectionEngine"]
+# Register HyperView providers into LanceDB registry.
+import hyperview.embeddings.providers.lancedb_providers as _lancedb_providers # noqa: F401
+
+
+def __getattr__(name: str):
+ """Lazy import for heavy dependencies (UMAP/numba)."""
+ if name == "ProjectionEngine":
+ from hyperview.embeddings.projection import ProjectionEngine
+ return ProjectionEngine
+ raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+__all__ = [
+ "EmbeddingComputer",
+ "EmbeddingSpec",
+ "ProjectionEngine",
+ # Provider utilities
+ "get_engine",
+ "get_provider_info",
+ "list_embedding_providers",
+]
diff --git a/src/hyperview/embeddings/compute.py b/src/hyperview/embeddings/compute.py
index 27d68b7..d2026b5 100644
--- a/src/hyperview/embeddings/compute.py
+++ b/src/hyperview/embeddings/compute.py
@@ -1,103 +1,75 @@
-"""Embedding computation using EmbedAnything."""
-
-from __future__ import annotations
+"""Image embedding computation via EmbedAnything."""
import os
import tempfile
+from pathlib import Path
-import embed_anything
import numpy as np
-from embed_anything import EmbeddingModel, WhichModel
+from embed_anything import EmbeddingModel
from PIL import Image
-try:
- from tqdm import tqdm
-except ImportError:
- tqdm = None
-
from hyperview.core.sample import Sample
class EmbeddingComputer:
- """Compute embeddings for images using EmbedAnything."""
+ """Compute embeddings for image samples using EmbedAnything."""
- def __init__(self, model: str = "clip"):
+ def __init__(self, model: str):
"""Initialize the embedding computer.
Args:
- model: Model to use for embeddings.
+ model: HuggingFace model ID to load via EmbedAnything.
"""
- self.model_name = model
- self._model = None
- self._initialized = False
-
- def _init_model(self) -> None:
- """Lazily initialize the model."""
- if self._initialized:
- return
- # Use CLIP model by default
- self._model = EmbeddingModel.from_pretrained_hf(
- WhichModel.Clip,
- model_id="openai/clip-vit-base-patch32",
- )
- self._embed_anything = embed_anything
- self._initialized = True
+ if not model or not model.strip():
+ raise ValueError("model must be a non-empty HuggingFace model_id")
- def _load_rgb_image(self, sample: Sample) -> Image.Image:
- """Load an image and ensure it is in RGB mode."""
- image = sample.load_image()
- if image.mode != "RGB":
- image = image.convert("RGB")
- return image
+ self.model_id = model
+ self._model: EmbeddingModel | None = None
- def _embed_with_model(
- self,
- sample: Sample,
- image: Image.Image | None = None,
- ) -> np.ndarray | None:
- """Attempt to embed a sample via embed_anything, handling memory-backed files."""
- path = sample.filepath
- temp_path: str | None = None
+ def _get_model(self) -> EmbeddingModel:
+ """Lazily initialize the EmbedAnything model."""
+ if self._model is None:
+ self._model = EmbeddingModel.from_pretrained_hf(model_id=self.model_id)
+ return self._model
+ def _load_rgb_image(self, sample: Sample) -> Image.Image:
+ """Load an image and normalize it to RGB.
+
+ For file-backed samples, returns an in-memory copy and closes the file
+ handle immediately to avoid leaking descriptors during batch processing.
+ """
+ with sample.load_image() as img:
+ img.load()
+ if img.mode != "RGB":
+ return img.convert("RGB")
+ return img.copy()
+
+ def _embed_file(self, file_path: str) -> np.ndarray:
+ model = self._get_model()
+ result = model.embed_file(file_path)
+
+ if not result:
+ raise RuntimeError(f"EmbedAnything returned no embeddings for: {file_path}")
+ if len(result) != 1:
+ raise RuntimeError(
+ f"Expected 1 embedding for an image file, got {len(result)}: {file_path}"
+ )
+
+ return np.asarray(result[0].embedding, dtype=np.float32)
+
+ def _embed_pil_image(self, image: Image.Image) -> np.ndarray:
+ temp_fd, temp_path = tempfile.mkstemp(suffix=".png")
+ os.close(temp_fd)
try:
- if path.startswith("memory://"):
- if image is None:
- image = self._load_rgb_image(sample)
- temp_file = tempfile.NamedTemporaryFile(suffix=".png", delete=False)
- image.save(temp_file, format="PNG")
- temp_file.close()
- temp_path = temp_file.name
- path = temp_path
-
- result = self._embed_anything.embed_file(path, embedder=self._model)
- if result:
- return np.array(result[0].embedding, dtype=np.float32)
+ image.save(temp_path, format="PNG")
+ return self._embed_file(temp_path)
finally:
- if temp_path and os.path.exists(temp_path):
- os.remove(temp_path)
-
- return None
+ Path(temp_path).unlink(missing_ok=True)
def compute_single(self, sample: Sample) -> np.ndarray:
- """Compute embedding for a single sample.
-
- Args:
- sample: Sample to compute embedding for.
-
- Returns:
- Embedding as numpy array.
- """
- self._init_model()
-
- pil_image = None
- if sample.filepath.startswith("memory://"):
- pil_image = self._load_rgb_image(sample)
-
- embedding = self._embed_with_model(sample, image=pil_image)
- if embedding is None:
- raise RuntimeError(f"Failed to compute embedding for sample {sample.id}")
-
- return embedding
+ """Compute embedding for a single sample."""
+ image = self._load_rgb_image(sample)
+ return self._embed_pil_image(image)
def compute_batch(
self,
@@ -105,46 +77,13 @@ def compute_batch(
batch_size: int = 32,
show_progress: bool = True,
) -> list[np.ndarray]:
- """Compute embeddings for a batch of samples.
-
- Args:
- samples: List of samples to compute embeddings for.
- batch_size: Number of samples to process at once.
- show_progress: Whether to show a progress bar.
-
- Returns:
- List of embeddings as numpy arrays.
- """
- self._init_model()
-
- embeddings = []
- total = len(samples)
-
- if show_progress and tqdm is not None:
- iterator = tqdm(range(0, total, batch_size), desc="Computing embeddings")
- else:
- if show_progress and tqdm is None:
- print(f"Computing embeddings for {total} samples...")
- iterator = range(0, total, batch_size)
-
- for i in iterator:
- batch = samples[i : i + batch_size]
- batch_embeddings = []
-
- for sample in batch:
- pil_image = None
- if sample.filepath.startswith("memory://"):
- pil_image = self._load_rgb_image(sample)
-
- embedding = self._embed_with_model(sample, image=pil_image)
- if embedding is None:
- raise RuntimeError(
- f"Failed to compute embedding for sample {sample.id}"
- )
-
- batch_embeddings.append(embedding)
+ """Compute embeddings for a list of samples."""
+ if batch_size <= 0:
+ raise ValueError("batch_size must be > 0")
+ self._get_model()
- embeddings.extend(batch_embeddings)
+ if show_progress:
+ print(f"Computing embeddings for {len(samples)} samples...")
- return embeddings
+ return [self.compute_single(sample) for sample in samples]
diff --git a/src/hyperview/embeddings/engine.py b/src/hyperview/embeddings/engine.py
new file mode 100644
index 0000000..a82e8ac
--- /dev/null
+++ b/src/hyperview/embeddings/engine.py
@@ -0,0 +1,330 @@
+"""Embedding spec + engine built on LanceDB's embedding registry."""
+
+from __future__ import annotations
+
+import hashlib
+import json
+from dataclasses import dataclass, field
+from typing import Any, Literal
+
+import numpy as np
+
+# Register HyperView providers into LanceDB registry.
+import hyperview.embeddings.providers.lancedb_providers as _lancedb_providers # noqa: F401
+
+__all__ = [
+ "EmbeddingSpec",
+ "EmbeddingEngine",
+ "get_engine",
+ "list_embedding_providers",
+ "get_provider_info",
+]
+
+HYPERBOLIC_PROVIDERS = frozenset({"hyper-models"})
+
+
+@dataclass
+class EmbeddingSpec:
+ """Specification for an embedding model.
+
+ All providers live in the LanceDB registry. HyperView's custom providers
+ (embed-anything, hyper-models) are registered on import.
+
+ Attributes:
+ provider: Provider identifier (e.g., 'embed-anything', 'hyper-models', 'open-clip')
+ model_id: Model identifier (HuggingFace model_id, checkpoint name, etc.)
+ checkpoint: Optional checkpoint path/URL for weight-only models
+ provider_kwargs: Additional kwargs passed to the embedding function
+ modality: What input type this embedder handles
+ """
+
+ provider: str
+ model_id: str | None = None
+ checkpoint: str | None = None
+ provider_kwargs: dict[str, Any] = field(default_factory=dict)
+ modality: Literal["image", "text", "multimodal"] = "image"
+
+ @property
+ def geometry(self) -> Literal["euclidean", "hyperboloid"]:
+ """Get the output geometry for this spec."""
+
+ if self.provider == "hyper-models":
+ model_name = self.model_id or self.provider_kwargs.get("name")
+ if model_name is None:
+ return "hyperboloid"
+ import hyper_models
+
+ geom = str(hyper_models.get_model_info(str(model_name)).geometry)
+ return "hyperboloid" if geom in ("hyperboloid", "poincare") else "euclidean"
+
+ if self.provider in HYPERBOLIC_PROVIDERS:
+ return "hyperboloid"
+ return "euclidean"
+
+ def to_dict(self) -> dict[str, Any]:
+ """Convert to JSON-serializable dict for persistence."""
+ d: dict[str, Any] = {
+ "provider": self.provider,
+ "modality": self.modality,
+ "geometry": self.geometry,
+ }
+ if self.model_id:
+ d["model_id"] = self.model_id
+ if self.checkpoint:
+ d["checkpoint"] = self.checkpoint
+ if self.provider_kwargs:
+ d["provider_kwargs"] = self.provider_kwargs
+ return d
+
+ @classmethod
+ def from_dict(cls, d: dict[str, Any]) -> EmbeddingSpec:
+ """Create from dict (e.g., loaded from JSON)."""
+ return cls(
+ provider=d["provider"],
+ model_id=d.get("model_id"),
+ checkpoint=d.get("checkpoint"),
+ provider_kwargs=d.get("provider_kwargs", {}),
+ modality=d.get("modality", "image"),
+ )
+
+ def content_hash(self) -> str:
+ """Generate a short hash of the spec for collision-resistant keys."""
+ content = json.dumps(self.to_dict(), sort_keys=True)
+ return hashlib.sha256(content.encode()).hexdigest()[:12]
+
+ def make_space_key(self) -> str:
+ """Generate a collision-resistant space_key from this spec.
+
+ Format: {provider}__{slugified_model_id}__{content_hash}
+ """
+ from hyperview.storage.schema import slugify_model_id
+
+ model_part = self.model_id or self.checkpoint or "default"
+ slug = slugify_model_id(model_part)
+ content_hash = self.content_hash()
+ return f"{self.provider}__{slug}__{content_hash}"
+
+
+class EmbeddingEngine:
+ """Embedding engine using LanceDB registry.
+
+ All providers are accessed through the LanceDB embedding registry.
+ HyperView providers are registered automatically on import.
+ """
+
+ def __init__(self) -> None:
+ self._cache: dict[str, Any] = {} # spec_hash -> embedding function
+
+ def get_function(self, spec: EmbeddingSpec) -> Any:
+ """Get an embedding function from LanceDB registry.
+
+ Args:
+ spec: Embedding specification.
+
+ Returns:
+ LanceDB EmbeddingFunction instance.
+
+ Raises:
+ ValueError: If provider not found in registry.
+ """
+ cache_key = spec.content_hash()
+ if cache_key in self._cache:
+ return self._cache[cache_key]
+
+ from lancedb.embeddings import get_registry
+
+ registry = get_registry()
+
+ # Get provider factory from registry
+ try:
+ factory = registry.get(spec.provider)
+ except KeyError:
+ available = list_embedding_providers()
+ raise ValueError(
+ f"Unknown provider: '{spec.provider}'. "
+ f"Available: {', '.join(sorted(available))}"
+ ) from None
+
+ create_kwargs: dict[str, Any] = {}
+ if spec.model_id:
+ create_kwargs["name"] = spec.model_id
+
+ if spec.checkpoint:
+ create_kwargs["checkpoint"] = spec.checkpoint
+
+ create_kwargs.update(spec.provider_kwargs)
+
+ try:
+ func = factory.create(**create_kwargs)
+ except ImportError as e:
+ raise ImportError(
+ f"Provider '{spec.provider}' requires additional dependencies. "
+ "Install the provider's extra dependencies and try again."
+ ) from e
+
+ self._cache[cache_key] = func
+ return func
+
+ def embed_images(
+ self,
+ samples: list[Any],
+ spec: EmbeddingSpec,
+ batch_size: int = 32,
+ show_progress: bool = True,
+ ) -> np.ndarray:
+ """Compute embeddings for image samples.
+
+ Args:
+ samples: List of Sample objects with image filepaths.
+ spec: Embedding specification.
+ batch_size: Batch size for processing.
+ show_progress: Whether to show progress.
+
+ Returns:
+ Array of shape (N, D) where N is len(samples) and D is embedding dim.
+ """
+ func = self.get_function(spec)
+
+ if show_progress:
+ print(f"Computing embeddings for {len(samples)} samples...")
+
+ all_embeddings: list[np.ndarray] = []
+ for i in range(0, len(samples), batch_size):
+ batch_samples = samples[i:i + batch_size]
+
+ batch_paths = [s.filepath for s in batch_samples]
+ batch_embeddings = func.compute_source_embeddings(batch_paths)
+ all_embeddings.extend(batch_embeddings)
+
+ return np.array(all_embeddings, dtype=np.float32)
+
+ def embed_texts(
+ self,
+ texts: list[str],
+ spec: EmbeddingSpec,
+ ) -> np.ndarray:
+ """Compute embeddings for text inputs.
+
+ Args:
+ texts: List of text strings.
+ spec: Embedding specification.
+
+ Returns:
+ Array of shape (N, D).
+ """
+ func = self.get_function(spec)
+
+ if hasattr(func, "generate_embeddings"):
+ out = func.generate_embeddings(texts)
+ return np.asarray(out, dtype=np.float32)
+
+ embeddings: list[np.ndarray] = []
+ for text in texts:
+ out = func.compute_query_embeddings(text)
+ if not out:
+ raise RuntimeError(f"Provider '{spec.provider}' returned no embedding for query")
+ embeddings.append(np.asarray(out[0], dtype=np.float32))
+ return np.vstack(embeddings)
+
+ def get_space_config(self, spec: EmbeddingSpec, dim: int) -> dict[str, Any]:
+ """Get space configuration for storage.
+
+ Args:
+ spec: Embedding specification.
+ dim: Embedding dimension.
+
+ Returns:
+ Config dict for SpaceInfo.config_json.
+ """
+ func = self.get_function(spec)
+
+ config = spec.to_dict()
+ config["dim"] = dim
+
+ if hasattr(func, "geometry"):
+ config["geometry"] = func.geometry
+ if hasattr(func, "curvature") and func.curvature is not None:
+ config["curvature"] = func.curvature
+
+ if config.get("geometry") == "hyperboloid":
+ config["spatial_dim"] = dim - 1
+
+ return config
+
+
+_ENGINE: EmbeddingEngine | None = None
+
+
+def get_engine() -> EmbeddingEngine:
+ """Get the global embedding engine singleton."""
+ global _ENGINE
+ if _ENGINE is None:
+ _ENGINE = EmbeddingEngine()
+ return _ENGINE
+
+
+def list_embedding_providers(available_only: bool = False) -> list[str]:
+ """List all registered embedding providers.
+
+ Args:
+ available_only: If True, only return providers whose dependencies are installed.
+
+ Returns:
+ List of provider identifiers.
+ """
+ from lancedb.embeddings import get_registry
+
+ registry = get_registry()
+
+ all_providers = list(getattr(registry, "_functions", {}).keys())  # _functions is a private LanceDB attr; getattr fallback guards against API changes
+
+ if not available_only:
+ return sorted(all_providers)
+
+ available: list[str] = []
+ for provider in all_providers:
+ try:
+ factory = registry.get(provider)
+ factory.create()
+ available.append(provider)
+ except ImportError:
+ pass
+ except (TypeError, ValueError):
+ available.append(provider)
+
+ return sorted(available)
+
+
+def get_provider_info(provider: str) -> dict[str, Any]:
+ """Get information about an embedding provider.
+
+ Args:
+ provider: Provider identifier.
+
+ Returns:
+ Dict with provider info.
+ """
+ from lancedb.embeddings import get_registry
+
+ registry = get_registry()
+
+ try:
+ factory = registry.get(provider)
+ except KeyError:
+ raise ValueError(f"Unknown provider: {provider}") from None
+
+ info: dict[str, Any] = {
+ "provider": provider,
+ "source": "hyperview" if provider in ("embed-anything", "hyper-models") else "lancedb",
+ "geometry": "hyperboloid" if provider in HYPERBOLIC_PROVIDERS else "euclidean",
+ }
+
+ try:
+ factory.create()
+ info["installed"] = True
+ except ImportError:
+ info["installed"] = False
+ except (TypeError, ValueError):
+ info["installed"] = True
+
+ return info
diff --git a/src/hyperview/embeddings/pipelines.py b/src/hyperview/embeddings/pipelines.py
new file mode 100644
index 0000000..e3aa716
--- /dev/null
+++ b/src/hyperview/embeddings/pipelines.py
@@ -0,0 +1,203 @@
+"""Compute orchestration pipelines for HyperView.
+
+These functions coordinate embedding computation and 2D layout/projection
+computation, persisting results into the configured storage backend.
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+import numpy as np
+
+from hyperview.storage.backend import StorageBackend
+from hyperview.storage.schema import make_layout_key
+
+
+def compute_embeddings(
+ storage: StorageBackend,
+ spec: Any,
+ batch_size: int = 32,
+ show_progress: bool = True,
+) -> tuple[str, int, int]:
+ """Compute embeddings for samples that don't have them yet.
+
+ Args:
+ storage: Storage backend to read samples from and write embeddings to.
+ spec: Embedding specification (provider, model_id, etc.)
+ batch_size: Batch size for processing.
+ show_progress: Whether to show progress bar.
+
+ Returns:
+ Tuple of (space_key, num_computed, num_skipped).
+
+ Raises:
+ ValueError: If no samples in storage or provider not found.
+ """
+ from hyperview.embeddings.engine import get_engine
+
+ engine = get_engine()
+
+ all_samples = storage.get_all_samples()
+ if not all_samples:
+ raise ValueError("No samples in storage")
+
+ # Generate space key before computing (deterministic from spec)
+ space_key = spec.make_space_key()
+
+ # Check which samples need embeddings
+ missing_ids = storage.get_missing_embedding_ids(space_key)
+
+ # If space doesn't exist yet, all samples are missing
+ if not storage.get_space(space_key):
+ missing_ids = [s.id for s in all_samples]
+
+ num_skipped = len(all_samples) - len(missing_ids)
+
+ if not missing_ids:
+ if show_progress:
+ print(f"All {len(all_samples)} samples already have embeddings in space '{space_key}'")
+ return space_key, 0, num_skipped
+
+ samples_to_embed = storage.get_samples_by_ids(missing_ids)
+
+ if show_progress and num_skipped > 0:
+ print(f"Skipped {num_skipped} samples with existing embeddings")
+
+ # Compute all embeddings via the engine
+ embeddings = engine.embed_images(
+ samples=samples_to_embed,
+ spec=spec,
+ batch_size=batch_size,
+ show_progress=show_progress,
+ )
+
+ dim = embeddings.shape[1]
+
+ # Ensure space exists (create if needed)
+ config = engine.get_space_config(spec, dim)
+ storage.ensure_space(
+ model_id=spec.model_id or spec.provider,
+ dim=dim,
+ config=config,
+ space_key=space_key,
+ )
+
+ # Store embeddings
+ ids = [s.id for s in samples_to_embed]
+ storage.add_embeddings(space_key, ids, embeddings)
+
+ return space_key, len(ids), num_skipped
+
+
+def compute_layout(
+ storage: StorageBackend,
+ space_key: str | None = None,
+ method: str = "umap",
+ geometry: str = "euclidean",
+ n_neighbors: int = 15,
+ min_dist: float = 0.1,
+ metric: str = "cosine",
+ force: bool = False,
+ show_progress: bool = True,
+) -> str:
+ """Compute 2D layout/projection for visualization.
+
+ Args:
+ storage: Storage backend with embeddings.
+ space_key: Embedding space to project. If None, uses the first available.
+ method: Projection method ('umap' supported).
+ geometry: Output geometry type ('euclidean' or 'poincare').
+ n_neighbors: Number of neighbors for UMAP.
+ min_dist: Minimum distance for UMAP.
+ metric: Distance metric for UMAP.
+ force: Force recomputation even if layout exists.
+ show_progress: Whether to print progress messages.
+
+ Returns:
+ layout_key for the computed layout.
+
+ Raises:
+ ValueError: If no embedding spaces, space not found, or insufficient samples.
+ """
+ from hyperview.embeddings.projection import ProjectionEngine
+
+ if method != "umap":
+ raise ValueError(f"Invalid method: {method}. Only 'umap' is supported.")
+
+ if geometry not in ("euclidean", "poincare"):
+ raise ValueError(f"Invalid geometry: {geometry}. Must be 'euclidean' or 'poincare'.")
+
+ if space_key is None:
+ spaces = storage.list_spaces()
+ if not spaces:
+ raise ValueError("No embedding spaces. Call compute_embeddings() first.")
+
+ # Choose a sensible default space based on the requested output geometry.
+ # - For Poincaré output, prefer a hyperbolic (hyperboloid) embedding space if present.
+ # - For Euclidean output, prefer a Euclidean embedding space if present.
+ if geometry == "poincare":
+ preferred = next((s for s in spaces if s.geometry == "hyperboloid"), None)
+ else:
+ preferred = next((s for s in spaces if s.geometry != "hyperboloid"), None)
+
+ space_key = (preferred.space_key if preferred is not None else spaces[0].space_key)
+
+ space = storage.get_space(space_key)
+ if space is None:
+ raise ValueError(f"Space not found: {space_key}")
+
+ input_geometry = space.geometry
+ curvature = (space.config or {}).get("curvature")
+
+ ids, vectors = storage.get_embeddings(space_key)
+ if len(ids) == 0:
+ raise ValueError(f"No embeddings in space '{space_key}'. Call compute_embeddings() first.")
+
+ if len(ids) < 3:
+ raise ValueError(f"Need at least 3 samples for visualization, have {len(ids)}")
+
+ layout_params = {
+ "n_neighbors": n_neighbors,
+ "min_dist": min_dist,
+ "metric": metric,
+ }
+ layout_key = make_layout_key(space_key, method, geometry, layout_params)
+
+ if not force:
+ existing_layout = storage.get_layout(layout_key)
+ if existing_layout is not None:
+ existing_ids, _ = storage.get_layout_coords(layout_key)
+ if set(existing_ids) == set(ids):
+ if show_progress:
+ print(f"Layout '{layout_key}' already exists with {len(ids)} points")
+ return layout_key
+ if show_progress:
+ print("Layout exists but has different samples, recomputing...")
+
+ if show_progress:
+ print(f"Computing {geometry} {method} layout for {len(ids)} samples...")
+
+ storage.ensure_layout(
+ layout_key=layout_key,
+ space_key=space_key,
+ method=method,
+ geometry=geometry,
+ params=layout_params,
+ )
+
+ engine = ProjectionEngine()
+ coords = engine.project(
+ vectors,
+ input_geometry=input_geometry,
+ output_geometry=geometry,
+ curvature=curvature,
+ method=method,
+ n_neighbors=n_neighbors,
+ min_dist=min_dist,
+ metric=metric,
+ )
+
+ storage.add_layout_coords(layout_key, ids, coords)
+
+ return layout_key
diff --git a/src/hyperview/embeddings/projection.py b/src/hyperview/embeddings/projection.py
index ecef3c8..83dff50 100644
--- a/src/hyperview/embeddings/projection.py
+++ b/src/hyperview/embeddings/projection.py
@@ -1,8 +1,7 @@
"""Projection methods for dimensionality reduction."""
-from __future__ import annotations
-
import logging
+import warnings
import numpy as np
import umap
@@ -13,44 +12,147 @@
class ProjectionEngine:
"""Engine for projecting high-dimensional embeddings to 2D."""
- def project_umap(
+ def to_poincare_ball(
+ self,
+ hyperboloid_embeddings: np.ndarray,
+ curvature: float | None = None,
+ clamp_radius: float = 0.999999,
+ ) -> np.ndarray:
+ """Convert hyperboloid (Lorentz) coordinates to Poincaré ball coordinates.
+
+ Input is expected to be shape (N, D+1) with first coordinate being time-like.
+ Points are assumed to satisfy: t^2 - ||x||^2 = 1/c (c > 0).
+
+ Returns Poincaré ball coordinates of shape (N, D) in the unit ball.
+
+ Notes:
+ - Many hyperbolic libraries parameterize curvature as a positive number c
+ where the manifold has sectional curvature -c.
+ - We map to the unit ball for downstream distance metrics (UMAP 'poincare').
+ """
+ if hyperboloid_embeddings.ndim != 2 or hyperboloid_embeddings.shape[1] < 2:
+ raise ValueError(
+ "hyperboloid_embeddings must have shape (N, D+1) with D>=1"
+ )
+
+ c = float(curvature) if curvature is not None else 1.0
+ if c <= 0:
+ raise ValueError(f"curvature must be > 0, got {c}")
+
+ # Radius R = 1/sqrt(c) for curvature -c
+ R = 1.0 / np.sqrt(c)
+
+ t = hyperboloid_embeddings[:, :1]
+ x = hyperboloid_embeddings[:, 1:]
+
+ # Map to ball of radius R: u_R = R * x / (t + R).
+ denom = t + R
+ u_R = R * x / denom
+
+ # Rescale to unit ball: u = u_R / R = x / (t + R) (identity when c == 1)
+ u = u_R / R
+
+ # Numerical guard: ensure inside the unit ball
+ radii = np.linalg.norm(u, axis=1)
+ mask = radii >= clamp_radius
+ if np.any(mask):
+ u[mask] = u[mask] / radii[mask][:, np.newaxis] * clamp_radius
+
+ return u.astype(np.float32)
+
+ def project(
self,
embeddings: np.ndarray,
+ *,
+ input_geometry: str = "euclidean",
+ output_geometry: str = "euclidean",
+ curvature: float | None = None,
+ method: str = "umap",
n_neighbors: int = 15,
min_dist: float = 0.1,
metric: str = "cosine",
- n_components: int = 2,
random_state: int = 42,
) -> np.ndarray:
- """Project embeddings to Euclidean 2D using UMAP.
+ """Project embeddings to 2D with geometry-aware preprocessing.
+
+ This separates two concerns:
+ 1) Geometry/model transforms for the *input* embeddings (e.g. hyperboloid -> Poincaré)
+ 2) Dimensionality reduction / layout (currently UMAP)
Args:
- embeddings: High-dimensional embeddings (N x D).
- n_neighbors: Number of neighbors for UMAP.
- min_dist: Minimum distance between points.
- metric: Distance metric to use.
- n_components: Number of output dimensions.
- random_state: Random seed for reproducibility.
+ embeddings: Input embeddings (N x D) or hyperboloid (N x D+1).
+ input_geometry: Geometry/model of the input embeddings (euclidean, hyperboloid).
+ output_geometry: Geometry of the output coordinates (euclidean, poincare).
+ curvature: Curvature parameter for hyperbolic embeddings (positive c).
+ method: Layout method (currently only 'umap').
+ n_neighbors: UMAP neighbors.
+ min_dist: UMAP min_dist.
+ metric: Input metric (used for euclidean inputs).
+ random_state: Random seed.
Returns:
2D coordinates (N x 2).
"""
- # Safety check for small datasets
+ if method != "umap":
+ raise ValueError(f"Invalid method: {method}. Only 'umap' is supported.")
+
+ prepared = embeddings
+ prepared_metric: str = metric
+
+ if input_geometry == "hyperboloid":
+ # Convert to unit Poincaré ball and use UMAP's built-in hyperbolic distance.
+ prepared = self.to_poincare_ball(embeddings, curvature=curvature)
+ prepared_metric = "poincare"
+
+ if output_geometry == "poincare":
+ return self.project_to_poincare(
+ prepared,
+ n_neighbors=n_neighbors,
+ min_dist=min_dist,
+ metric=prepared_metric,
+ random_state=random_state,
+ )
+
+ if output_geometry == "euclidean":
+ return self.project_umap(
+ prepared,
+ n_neighbors=n_neighbors,
+ min_dist=min_dist,
+ metric=prepared_metric,
+ n_components=2,
+ random_state=random_state,
+ )
+
+ raise ValueError(
+ f"Invalid output_geometry: {output_geometry}. Must be 'euclidean' or 'poincare'."
+ )
+
+ def project_umap(
+ self,
+ embeddings: np.ndarray,
+ n_neighbors: int = 15,
+ min_dist: float = 0.1,
+ metric: str = "cosine",
+ n_components: int = 2,
+ random_state: int = 42,
+ ) -> np.ndarray:
+ """Project embeddings to Euclidean 2D using UMAP."""
n_neighbors = min(n_neighbors, len(embeddings) - 1)
if n_neighbors < 2:
n_neighbors = 2
+ n_jobs = 1 if random_state is not None else -1
+
reducer = umap.UMAP(
n_neighbors=n_neighbors,
min_dist=min_dist,
n_components=n_components,
metric=metric,
random_state=random_state,
+ n_jobs=n_jobs,
)
coords = reducer.fit_transform(embeddings)
-
- # Normalize to [-1, 1] range for visualization consistency
coords = self._normalize_coords(coords)
return coords
@@ -63,53 +165,35 @@ def project_to_poincare(
metric: str = "cosine",
random_state: int = 42,
) -> np.ndarray:
- """Project embeddings to the Poincaré disk.
-
- This uses UMAP with a hyperbolic output metric. UMAP computes the embedding
- in the Hyperboloid model (Lorentz model). We then project this to the
- Poincaré disk.
-
- Args:
- embeddings: High-dimensional embeddings (N x D).
- n_neighbors: Number of neighbors for UMAP.
- min_dist: Minimum distance between points.
- metric: Input distance metric.
- random_state: Random seed for reproducibility.
-
- Returns:
- 2D coordinates in Poincaré disk (N x 2), with norm < 1.
- """
- # Safety check for small datasets
+ """Project embeddings to the Poincaré disk using UMAP with hyperboloid output."""
n_neighbors = min(n_neighbors, len(embeddings) - 1)
if n_neighbors < 2:
n_neighbors = 2
- # The time-like coordinate t is implicit: t = sqrt(1 + x^2 + y^2).
- reducer = umap.UMAP(
- n_neighbors=n_neighbors,
- min_dist=min_dist,
- n_components=2, # We want a 2D manifold
- metric=metric,
- output_metric="hyperboloid",
- random_state=random_state,
- )
- # These are spatial coordinates (x, y) in the Hyperboloid model
- spatial_coords = reducer.fit_transform(embeddings)
+ n_jobs = 1 if random_state is not None else -1
+
+ # Suppress warning about missing gradient for poincare metric (only affects inverse_transform)
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", message="gradient function is not yet implemented")
+ reducer = umap.UMAP(
+ n_neighbors=n_neighbors,
+ min_dist=min_dist,
+ n_components=2,
+ metric=metric,
+ output_metric="hyperboloid",
+ random_state=random_state,
+ n_jobs=n_jobs,
+ )
+ spatial_coords = reducer.fit_transform(embeddings)
- # Calculate implicit time coordinate t
- # t = sqrt(1 + x^2 + y^2)
- # Note: In some conventions it's t^2 - x^2 - y^2 = 1, so t = sqrt(1 + r^2)
squared_norm = np.sum(spatial_coords**2, axis=1)
t = np.sqrt(1 + squared_norm)
- # Project to Poincaré disk
- # Formula: u = x / (1 + t)
- # This maps the upper sheet of the hyperboloid (t >= 1) to the unit disk.
+ # Project to Poincaré disk: u = x / (1 + t)
denom = 1 + t
poincare_coords = spatial_coords / denom[:, np.newaxis]
- # Ensure numerical stability - clamp to slightly less than 1.0 if needed
- # theoretically it should be < 1, but float precision might cause issues
+ # Clamp to unit disk for numerical stability
radii = np.linalg.norm(poincare_coords, axis=1)
max_radius = 0.999
mask = radii > max_radius
@@ -119,12 +203,7 @@ def project_to_poincare(
poincare_coords[mask] / radii[mask][:, np.newaxis] * max_radius
)
- # Center the embeddings in the Poincaré disk
poincare_coords = self._center_poincare(poincare_coords)
-
- # Apply radial scaling to reduce crowding at the boundary
- # This effectively "zooms out" in hyperbolic space, pulling points
- # towards the center for better visualization.
poincare_coords = self._scale_poincare(poincare_coords, factor=0.65)
return poincare_coords
@@ -132,57 +211,32 @@ def project_to_poincare(
def _scale_poincare(self, coords: np.ndarray, factor: float) -> np.ndarray:
"""Scale points towards the origin in hyperbolic space.
- This scales the hyperbolic distance from the origin by `factor`.
- If factor < 1, points move closer to the center.
+ Scales hyperbolic distance from origin by `factor`. If factor < 1, points move closer to center.
"""
radii = np.linalg.norm(coords, axis=1)
- # Avoid division by zero
mask = radii > 1e-6
- # Calculate hyperbolic distance from origin
- # d = 2 * arctanh(r)
- # We want d_new = factor * d
- # r_new = tanh(d_new / 2) = tanh(factor * arctanh(r))
-
- # Use numpy operations for efficiency
r = radii[mask]
- # Clip r to avoid infinity in arctanh
r = np.minimum(r, 0.9999999)
-
- # d = 2 * np.arctanh(r)
- # r_new = np.tanh(factor * d / 2)
- # Simplified: r_new = tanh(factor * arctanh(r))
r_new = np.tanh(factor * np.arctanh(r))
- # Update coordinates
- # new_coords = coords * (r_new / r)
scale_ratios = np.ones_like(radii)
scale_ratios[mask] = r_new / r
return coords * scale_ratios[:, np.newaxis]
def _center_poincare(self, coords: np.ndarray) -> np.ndarray:
- """Center points in the Poincaré disk using a Möbius transformation.
-
- This moves the geometric centroid of the points to the origin.
- """
+ """Center points in the Poincaré disk using a Möbius transformation."""
if len(coords) == 0:
return coords
- # Treat as complex numbers for easier Möbius math
z = coords[:, 0] + 1j * coords[:, 1]
-
- # Compute the centroid (Euclidean mean in the disk)
- # This is a heuristic; the true hyperbolic center of mass is harder
- # but this works well for visualization centering.
centroid = np.mean(z)
- # If centroid is too close to boundary, don't center (unstable)
if np.abs(centroid) > 0.99 or np.abs(centroid) < 1e-6:
return coords
- # Möbius transformation to move centroid to origin:
- # w = (z - a) / (1 - conj(a) * z)
+ # Möbius transformation: w = (z - a) / (1 - conj(a) * z)
a = centroid
w = (z - a) / (1 - np.conj(a) * z)
@@ -193,34 +247,19 @@ def _normalize_coords(self, coords: np.ndarray) -> np.ndarray:
if len(coords) == 0:
return coords
- # Center the coordinates
coords = coords - coords.mean(axis=0)
-
- # Scale to fit in [-1, 1]
max_abs = np.abs(coords).max()
if max_abs > 0:
- coords = coords / max_abs * 0.95 # Leave some margin
+ coords = coords / max_abs * 0.95
return coords
def poincare_distance(self, u: np.ndarray, v: np.ndarray) -> float:
- """Compute the Poincaré distance between two points.
-
- Args:
- u: First point in Poincaré disk.
- v: Second point in Poincaré disk.
-
- Returns:
- Hyperbolic distance.
- """
+ """Compute the Poincaré distance between two points."""
u_norm_sq = np.sum(u**2)
v_norm_sq = np.sum(v**2)
diff_norm_sq = np.sum((u - v) ** 2)
- # Poincaré distance formula
- # d(u, v) = arccosh(1 + 2 * |u-v|^2 / ((1-|u|^2)(1-|v|^2)))
-
- # Clip values to avoid division by zero or negative logs
u_norm_sq = min(u_norm_sq, 0.99999)
v_norm_sq = min(v_norm_sq, 0.99999)
diff --git a/src/hyperview/embeddings/providers/__init__.py b/src/hyperview/embeddings/providers/__init__.py
new file mode 100644
index 0000000..9bdcc43
--- /dev/null
+++ b/src/hyperview/embeddings/providers/__init__.py
@@ -0,0 +1,7 @@
+"""Embedding providers.
+
+HyperView integrates with LanceDB's embedding registry.
+Custom providers are registered in `lancedb_providers.py`.
+"""
+
+__all__: list[str] = []
diff --git a/src/hyperview/embeddings/providers/lancedb_providers.py b/src/hyperview/embeddings/providers/lancedb_providers.py
new file mode 100644
index 0000000..d64f858
--- /dev/null
+++ b/src/hyperview/embeddings/providers/lancedb_providers.py
@@ -0,0 +1,196 @@
+"""LanceDB-registered embedding providers for HyperView.
+
+This module registers HyperView's embedding providers into the LanceDB embedding
+registry using the @register decorator.
+
+Providers:
+- embed-anything: CLIP-based image embeddings (torch-free, default)
+- hyper-models: Non-Euclidean model zoo via `hyper-models` (torch-free ONNX; downloads from HF Hub)
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+import numpy as np
+from lancedb.embeddings import EmbeddingFunction, register
+from pydantic import PrivateAttr
+
+__all__ = [
+ "EmbedAnythingEmbeddings",
+ "HyperModelsEmbeddings",
+]
+
+
+@register("embed-anything")
+class EmbedAnythingEmbeddings(EmbeddingFunction):
+ """CLIP-based image embeddings via embed-anything.
+
+ This is the default provider for HyperView - lightweight and torch-free.
+
+ Args:
+ name: HuggingFace model ID for CLIP (default: openai/clip-vit-base-patch32)
+ batch_size: Batch size for processing
+ """
+
+ name: str = "openai/clip-vit-base-patch32"
+ batch_size: int = 32
+
+ _computer: Any = PrivateAttr(default=None)
+ _ndims: int | None = PrivateAttr(default=None)
+
+ def __init__(self, **kwargs: Any) -> None:
+ super().__init__(**kwargs)
+ self._computer = None
+ self._ndims = None
+
+ def _get_computer(self) -> Any:
+ if self._computer is None:
+ from hyperview.embeddings.compute import EmbeddingComputer
+
+ self._computer = EmbeddingComputer(model=self.name)
+ return self._computer
+
+ def ndims(self) -> int:
+ if self._ndims is None:
+ if "large" in self.name.lower():
+ self._ndims = 768
+ elif "clip" in self.name.lower():
+ self._ndims = 512
+ else:
+ self._ndims = 512
+ return self._ndims
+
+ def compute_source_embeddings(
+ self, inputs: Any, *args: Any, **kwargs: Any
+ ) -> list[np.ndarray | None]:
+ from hyperview.core.sample import Sample
+
+ computer = self._get_computer()
+
+ samples: list[Any] = []
+ for inp in self.sanitize_input(inputs):
+ if isinstance(inp, Sample):
+ samples.append(inp)
+ elif isinstance(inp, str):
+ samples.append(Sample(id=inp, filepath=inp))
+ else:
+ raise TypeError(f"Unsupported input type: {type(inp)}")
+
+ embeddings = computer.compute_batch(samples, batch_size=self.batch_size, show_progress=False)
+ return list(embeddings)
+
+ def compute_query_embeddings(
+ self, query: Any, *args: Any, **kwargs: Any
+ ) -> list[np.ndarray | None]:
+ return self.compute_source_embeddings([query], *args, **kwargs)
+
+
+@register("hyper-models")
+class HyperModelsEmbeddings(EmbeddingFunction):
+ """Non-Euclidean embeddings via the `hyper-models` package.
+
+ This provider is a thin wrapper around `hyper_models.load(...)`.
+ Models are downloaded from the Hugging Face Hub on first use.
+
+ Args:
+ name: Model name in the hyper-models registry (e.g. 'hycoclip-vit-s').
+ checkpoint: Optional local path to an ONNX file (skips hub download).
+ batch_size: Batch size hint. Current HyCoCLIP/MERU ONNX exports may only
+ support batch_size=1; HyperView encodes one image at a time for
+ maximum compatibility.
+ """
+
+ name: str = "hycoclip-vit-s"
+ checkpoint: str | None = None
+ batch_size: int = 1
+
+ _model: Any = PrivateAttr(default=None)
+ _model_info: Any = PrivateAttr(default=None)
+
+ def __init__(self, **kwargs: Any) -> None:
+ super().__init__(**kwargs)
+ self._model = None
+ self._model_info = None
+
+ def _ensure_model_info(self) -> None:
+ if self._model_info is not None:
+ return
+
+ try:
+ import hyper_models
+ except ImportError as e:
+ raise ImportError(
+ "Provider 'hyper-models' requires the 'hyper-models' package. "
+ "Install it with: `uv pip install hyper-models`"
+ ) from e
+
+ try:
+ self._model_info = hyper_models.get_model_info(self.name)
+ except KeyError:
+ available = ", ".join(sorted(hyper_models.list_models()))
+ raise ValueError(
+ f"Unknown hyper-models model: '{self.name}'. Available: {available}"
+ ) from None
+
+ def _ensure_model(self) -> None:
+ if self._model is not None:
+ return
+
+ self._ensure_model_info()
+ import hyper_models
+
+ self._model = hyper_models.load(self.name, local_path=self.checkpoint)
+
+ def ndims(self) -> int:
+ self._ensure_model_info()
+ assert self._model_info is not None
+ return int(getattr(self._model_info, "dim"))
+
+ @property
+ def geometry(self) -> str:
+ self._ensure_model_info()
+ assert self._model_info is not None
+ return str(getattr(self._model_info, "geometry"))
+
+ def compute_source_embeddings(
+ self, inputs: Any, *args: Any, **kwargs: Any
+ ) -> list[np.ndarray | None]:
+ from hyperview.core.sample import Sample
+
+ self._ensure_model()
+ assert self._model is not None
+
+ inputs = self.sanitize_input(inputs)
+ all_embeddings: list[np.ndarray | None] = []
+
+ from PIL import Image
+
+ for inp in inputs:
+ if isinstance(inp, Sample):
+ with inp.load_image() as img:
+ img.load()
+ if img.mode != "RGB":
+ img = img.convert("RGB")
+ pil_img = img.copy()
+ elif isinstance(inp, str):
+ with Image.open(inp) as img:
+ img.load()
+ if img.mode != "RGB":
+ img = img.convert("RGB")
+ pil_img = img.copy()
+ elif isinstance(inp, Image.Image):
+ pil_img = inp.convert("RGB") if inp.mode != "RGB" else inp
+ else:
+ raise TypeError(f"Unsupported input type: {type(inp)}")
+
+ emb = self._model.encode_images([pil_img])
+ vec = np.asarray(emb[0], dtype=np.float32)
+ all_embeddings.append(vec)
+
+ return all_embeddings
+
+ def compute_query_embeddings(
+ self, query: Any, *args: Any, **kwargs: Any
+ ) -> list[np.ndarray | None]:
+ return self.compute_source_embeddings([query], *args, **kwargs)
diff --git a/src/hyperview/server/app.py b/src/hyperview/server/app.py
index d1f6bf9..86d4f9b 100644
--- a/src/hyperview/server/app.py
+++ b/src/hyperview/server/app.py
@@ -1,20 +1,23 @@
"""FastAPI application for HyperView."""
-from __future__ import annotations
-
import os
from pathlib import Path
+from typing import Any
-from fastapi import FastAPI, HTTPException, Query
+from fastapi import Depends, FastAPI, HTTPException, Query
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
+import numpy as np
+
from hyperview.core.dataset import Dataset
+from hyperview.core.selection import points_in_polygon
# Global dataset reference (set by launch())
_current_dataset: Dataset | None = None
+_current_session_id: str | None = None
class SelectionRequest(BaseModel):
@@ -23,6 +26,30 @@ class SelectionRequest(BaseModel):
sample_ids: list[str]
+class LassoSelectionRequest(BaseModel):
+ """Request model for lasso selection queries."""
+
+ layout_key: str # e.g., "openai_clip-vit-base-patch32__umap"
+ # Polygon vertices in data space, interleaved: [x0, y0, x1, y1, ...]
+ polygon: list[float]
+ offset: int = 0
+ limit: int = 100
+ include_thumbnails: bool = True
+
+
+class CurationFilterRequest(BaseModel):
+ """Request model for curation filtering."""
+
+ min_aesthetic_score: float | None = None
+ min_motion_score: float | None = None
+ max_cosine_similarity: float | None = None
+ caption_query: str | None = None
+ dedup_status: str | None = None
+ offset: int = 0
+ limit: int = 100
+ include_thumbnails: bool = True
+
+
class SampleResponse(BaseModel):
"""Response model for a sample."""
@@ -32,8 +59,31 @@ class SampleResponse(BaseModel):
label: str | None
thumbnail: str | None
metadata: dict
- embedding_2d: list[float] | None = None
- embedding_2d_hyperbolic: list[float] | None = None
+ width: int | None = None
+ height: int | None = None
+
+
+class LayoutInfoResponse(BaseModel):
+ """Response model for layout info."""
+
+ layout_key: str
+ space_key: str
+ method: str
+ geometry: str
+ count: int
+ params: dict[str, Any] | None
+
+
+class SpaceInfoResponse(BaseModel):
+ """Response model for embedding space info."""
+
+ space_key: str
+ model_id: str
+ dim: int
+ count: int
+ provider: str
+ geometry: str
+ config: dict[str, Any] | None
class DatasetResponse(BaseModel):
@@ -42,20 +92,185 @@ class DatasetResponse(BaseModel):
name: str
num_samples: int
labels: list[str]
- label_colors: dict[str, str]
+ spaces: list[SpaceInfoResponse]
+ layouts: list[LayoutInfoResponse]
class EmbeddingsResponse(BaseModel):
- """Response model for embeddings data."""
+ """Response model for embeddings data (for scatter plot)."""
+ layout_key: str
+ geometry: str
ids: list[str]
labels: list[str | None]
- euclidean: list[list[float]]
- hyperbolic: list[list[float]]
- label_colors: dict[str, str]
+ coords: list[list[float]]
+
+
+class SimilarSampleResponse(BaseModel):
+ """Response model for a similar sample with distance."""
+
+ id: str
+ filepath: str
+ filename: str
+ label: str | None
+ thumbnail: str | None
+ distance: float
+ metadata: dict
+
+
+class SimilaritySearchResponse(BaseModel):
+ """Response model for similarity search results."""
+
+ query_id: str
+ k: int
+ results: list[SimilarSampleResponse]
+
+
+def _extract_caption_sections(caption_text: str | None) -> tuple[str | None, str | None]:
+ if not caption_text:
+ return None, None
+
+ reasoning: str | None = None
+ answer: str | None = None
+
+ think_start = caption_text.find("")
+ think_end = caption_text.find(" ")
+ if think_start != -1 and think_end != -1 and think_end > think_start:
+ reasoning = caption_text[think_start + len("") : think_end].strip() or None
+
+ answer_start = caption_text.find("")
+ answer_end = caption_text.find(" ")
+ if answer_start != -1 and answer_end != -1 and answer_end > answer_start:
+ answer = caption_text[answer_start + len("") : answer_end].strip() or None
+
+ if answer is None:
+ answer = caption_text.strip() or None
+
+ return reasoning, answer
+
+
+def _resolve_video_file(sample: Any) -> Path | None:
+ metadata = sample.metadata if isinstance(sample.metadata, dict) else {}
+ candidate_paths: list[str] = []
+
+ for key in ("video_path", "clip_location"):
+ value = metadata.get(key)
+ if isinstance(value, str) and value.strip():
+ candidate_paths.append(value.strip())
+
+ if isinstance(sample.filepath, str) and sample.filepath.lower().endswith(".mp4"):
+ candidate_paths.append(sample.filepath)
+
+ for candidate in candidate_paths:
+ path = Path(candidate).expanduser()
+ if path.exists() and path.is_file():
+ return path.resolve()
+
+ return None
+
+
+def _metadata_number(metadata: dict[str, Any], *keys: str) -> float | None:
+ for key in keys:
+ value = metadata.get(key)
+ if isinstance(value, (int, float)):
+ numeric = float(value)
+ if np.isfinite(numeric):
+ return numeric
+ return None
+
+def _metadata_text(metadata: dict[str, Any], *keys: str) -> str | None:
+ for key in keys:
+ value = metadata.get(key)
+ if isinstance(value, str) and value.strip():
+ return value.strip()
+ return None
-def create_app(dataset: Dataset | None = None) -> FastAPI:
+
+def _sample_has_video(sample: Any) -> bool:
+ metadata = sample.metadata if isinstance(sample.metadata, dict) else {}
+ if _metadata_text(metadata, "video_path", "clip_location"):
+ return True
+ return isinstance(sample.filepath, str) and sample.filepath.lower().endswith(".mp4")
+
+
+def _sample_caption_text(metadata: dict[str, Any]) -> str | None:
+ return _metadata_text(
+ metadata,
+ "caption_answer",
+ "first_caption",
+ "caption_raw",
+ "caption",
+ )
+
+
+def _histogram(values: list[float], *, bins: int, lower: float, upper: float) -> list[dict[str, float | int]]:
+ if not values:
+ return []
+
+ if upper <= lower:
+ upper = lower + 1.0
+
+ counts, edges = np.histogram(np.asarray(values, dtype=np.float32), bins=bins, range=(lower, upper))
+ result: list[dict[str, float | int]] = []
+ for idx, count in enumerate(counts.tolist()):
+ result.append(
+ {
+ "start": float(edges[idx]),
+ "end": float(edges[idx + 1]),
+ "count": int(count),
+ }
+ )
+ return result
+
+
+def _score_summary(values: list[float]) -> dict[str, float | int | None]:
+ if not values:
+ return {"count": 0, "min": None, "max": None, "avg": None}
+
+ arr = np.asarray(values, dtype=np.float32)
+ return {
+ "count": int(arr.size),
+ "min": float(np.min(arr)),
+ "max": float(np.max(arr)),
+ "avg": float(np.mean(arr)),
+ }
+
+
+def _matches_curation_filter(sample: Any, request: CurationFilterRequest) -> bool:
+ metadata = sample.metadata if isinstance(sample.metadata, dict) else {}
+
+ aesthetic = _metadata_number(metadata, "aesthetic_score")
+ if request.min_aesthetic_score is not None:
+ if aesthetic is None or aesthetic < request.min_aesthetic_score:
+ return False
+
+ motion = _metadata_number(metadata, "motion_score")
+ if request.min_motion_score is not None:
+ if motion is None or motion < request.min_motion_score:
+ return False
+
+ similarity = _metadata_number(metadata, "cosine_sim_score", "dedup_cosine_similarity")
+ if request.max_cosine_similarity is not None:
+ if similarity is None or similarity > request.max_cosine_similarity:
+ return False
+
+ if request.dedup_status:
+ dedup_status = _metadata_text(metadata, "dedup_status") or "unknown"
+ if dedup_status != request.dedup_status:
+ return False
+
+ if request.caption_query:
+ caption_text = _sample_caption_text(metadata)
+ if caption_text is None:
+ return False
+ if request.caption_query.lower() not in caption_text.lower():
+ return False
+
+ return True
+
+
+def create_app(dataset: Dataset | None = None, session_id: str | None = None) -> FastAPI:
"""Create the FastAPI application.
Args:
@@ -64,9 +279,11 @@ def create_app(dataset: Dataset | None = None) -> FastAPI:
Returns:
FastAPI application instance.
"""
- global _current_dataset
+ global _current_dataset, _current_session_id
if dataset is not None:
_current_dataset = dataset
+ if session_id is not None:
+ _current_session_id = session_id
app = FastAPI(
title="HyperView",
@@ -74,6 +291,12 @@ def create_app(dataset: Dataset | None = None) -> FastAPI:
version="0.1.0",
)
+ def get_dataset() -> Dataset:
+ """Dependency that returns the current dataset or raises 404."""
+ if _current_dataset is None:
+ raise HTTPException(status_code=404, detail="No dataset loaded")
+ return _current_dataset
+
# CORS middleware for development
app.add_middleware(
CORSMiddleware,
@@ -83,37 +306,44 @@ def create_app(dataset: Dataset | None = None) -> FastAPI:
allow_headers=["*"],
)
+ @app.get("/__hyperview__/health")
+ async def hyperview_health():
+ return {
+ "name": "hyperview",
+ "version": app.version,
+ "session_id": _current_session_id,
+ "dataset": _current_dataset.name if _current_dataset is not None else None,
+ "pid": os.getpid(),
+ }
+
@app.get("/api/dataset", response_model=DatasetResponse)
- async def get_dataset_info():
+ async def get_dataset_info(ds: Dataset = Depends(get_dataset)):
"""Get dataset metadata."""
- if _current_dataset is None:
- raise HTTPException(status_code=404, detail="No dataset loaded")
+ spaces = ds.list_spaces()
+ space_dicts = [s.to_api_dict() for s in spaces]
+
+ layouts = ds.list_layouts()
+ layout_dicts = [l.to_api_dict() for l in layouts]
return DatasetResponse(
- name=_current_dataset.name,
- num_samples=len(_current_dataset),
- labels=_current_dataset.labels,
- label_colors=_current_dataset.get_label_colors(),
+ name=ds.name,
+ num_samples=len(ds),
+ labels=ds.labels,
+ spaces=space_dicts,
+ layouts=layout_dicts,
)
@app.get("/api/samples")
async def get_samples(
+ ds: Dataset = Depends(get_dataset),
offset: int = Query(0, ge=0),
limit: int = Query(100, ge=1, le=1000),
label: str | None = None,
):
"""Get paginated samples with thumbnails."""
- if _current_dataset is None:
- raise HTTPException(status_code=404, detail="No dataset loaded")
-
- samples = _current_dataset.samples
-
- # Filter by label if specified
- if label:
- samples = [s for s in samples if s.label == label]
-
- total = len(samples)
- samples = samples[offset : offset + limit]
+ samples, total = ds.get_samples_paginated(
+ offset=offset, limit=limit, label=label
+ )
return {
"total": total,
@@ -123,76 +353,314 @@ async def get_samples(
}
@app.get("/api/samples/{sample_id}", response_model=SampleResponse)
- async def get_sample(sample_id: str):
+ async def get_sample(sample_id: str, ds: Dataset = Depends(get_dataset)):
"""Get a single sample by ID."""
- if _current_dataset is None:
- raise HTTPException(status_code=404, detail="No dataset loaded")
-
try:
- sample = _current_dataset[sample_id]
+ sample = ds[sample_id]
return SampleResponse(**sample.to_api_dict())
except KeyError:
raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
@app.post("/api/samples/batch")
- async def get_samples_batch(request: SelectionRequest):
+ async def get_samples_batch(request: SelectionRequest, ds: Dataset = Depends(get_dataset)):
"""Get multiple samples by their IDs."""
- if _current_dataset is None:
- raise HTTPException(status_code=404, detail="No dataset loaded")
-
- samples = []
- for sample_id in request.sample_ids:
- try:
- sample = _current_dataset[sample_id]
- samples.append(sample.to_api_dict(include_thumbnail=True))
- except KeyError:
- pass # Skip missing samples
-
- return {"samples": samples}
+ samples = ds.get_samples_by_ids(request.sample_ids)
+ return {"samples": [s.to_api_dict(include_thumbnail=True) for s in samples]}
@app.get("/api/embeddings", response_model=EmbeddingsResponse)
- async def get_embeddings():
- """Get all embeddings for visualization."""
- if _current_dataset is None:
- raise HTTPException(status_code=404, detail="No dataset loaded")
-
- samples = [
- s
- for s in _current_dataset.samples
- if s.embedding_2d is not None and s.embedding_2d_hyperbolic is not None
- ]
-
- if not samples:
+ async def get_embeddings(ds: Dataset = Depends(get_dataset), layout_key: str | None = None):
+ """Get embedding coordinates for visualization."""
+ layouts = ds.list_layouts()
+ if not layouts:
raise HTTPException(
- status_code=400, detail="No embeddings computed. Call compute_visualization() first."
+ status_code=400, detail="No layouts computed. Call compute_visualization() first."
)
+ # Find the requested layout
+ layout_info = None
+ if layout_key is None:
+ layout_info = layouts[0]
+ layout_key = layout_info.layout_key
+ else:
+ layout_info = next((l for l in layouts if l.layout_key == layout_key), None)
+ if layout_info is None:
+ raise HTTPException(status_code=404, detail=f"Layout not found: {layout_key}")
+
+ ids, labels, coords = ds.get_visualization_data(layout_key)
+
+ if not ids:
+ raise HTTPException(status_code=400, detail=f"No data in layout '{layout_key}'.")
+
return EmbeddingsResponse(
- ids=[s.id for s in samples],
- labels=[s.label for s in samples],
- euclidean=[s.embedding_2d for s in samples],
- hyperbolic=[s.embedding_2d_hyperbolic for s in samples],
- label_colors=_current_dataset.get_label_colors(),
+ layout_key=layout_key,
+ geometry=layout_info.geometry,
+ ids=ids,
+ labels=labels,
+ coords=coords.tolist(),
)
+ @app.get("/api/spaces")
+ async def get_spaces(ds: Dataset = Depends(get_dataset)):
+ """Get all embedding spaces."""
+ spaces = ds.list_spaces()
+ return {"spaces": [s.to_api_dict() for s in spaces]}
+
+ @app.get("/api/layouts")
+ async def get_layouts(ds: Dataset = Depends(get_dataset)):
+ """Get all available layouts."""
+ layouts = ds.list_layouts()
+ return {"layouts": [l.to_api_dict() for l in layouts]}
+
@app.post("/api/selection")
async def sync_selection(request: SelectionRequest):
"""Sync selection state (for future use)."""
return {"status": "ok", "selected": request.sample_ids}
+ @app.post("/api/selection/lasso")
+ async def lasso_selection(request: LassoSelectionRequest, ds: Dataset = Depends(get_dataset)):
+ """Compute a lasso selection over the current embeddings.
+
+ Returns a total selected count and a paginated page of selected samples.
+
+ Notes:
+ - Selection is performed in *data space* (the same coordinates returned
+ by /api/embeddings).
+ - For now we use an in-memory scan with a tight AABB prefilter.
+ """
+ if request.offset < 0:
+ raise HTTPException(status_code=400, detail="offset must be >= 0")
+ if request.limit < 1 or request.limit > 2000:
+ raise HTTPException(status_code=400, detail="limit must be between 1 and 2000")
+
+ if len(request.polygon) < 6 or len(request.polygon) % 2 != 0:
+ raise HTTPException(
+ status_code=400,
+ detail="polygon must be an even-length list with at least 3 vertices",
+ )
+
+ poly = np.asarray(request.polygon, dtype=np.float32).reshape((-1, 2))
+ if not np.all(np.isfinite(poly)):
+ raise HTTPException(status_code=400, detail="polygon must contain only finite numbers")
+
+ # Tight AABB prefilter.
+ x_min = float(np.min(poly[:, 0]))
+ x_max = float(np.max(poly[:, 0]))
+ y_min = float(np.min(poly[:, 1]))
+ y_max = float(np.max(poly[:, 1]))
+
+ candidate_ids, candidate_coords = ds.get_lasso_candidates_aabb(
+ layout_key=request.layout_key,
+ x_min=x_min,
+ x_max=x_max,
+ y_min=y_min,
+ y_max=y_max,
+ )
+
+ if candidate_coords.size == 0:
+ return {"total": 0, "offset": request.offset, "limit": request.limit, "sample_ids": [], "samples": []}
+
+ inside_mask = points_in_polygon(candidate_coords, poly)
+ if not np.any(inside_mask):
+ return {"total": 0, "offset": request.offset, "limit": request.limit, "sample_ids": [], "samples": []}
+
+ selected_ids = [candidate_ids[i] for i in np.flatnonzero(inside_mask)]
+ total = len(selected_ids)
+
+ start = int(request.offset)
+ end = int(request.offset + request.limit)
+ sample_ids = selected_ids[start:end]
+
+ samples = ds.get_samples_by_ids(sample_ids)
+ sample_dicts = [s.to_api_dict(include_thumbnail=request.include_thumbnails) for s in samples]
+
+ return {
+ "total": total,
+ "offset": request.offset,
+ "limit": request.limit,
+ "sample_ids": sample_ids,
+ "samples": sample_dicts,
+ }
+
+ @app.get("/api/search/similar/{sample_id}", response_model=SimilaritySearchResponse)
+ async def search_similar(
+ sample_id: str,
+ ds: Dataset = Depends(get_dataset),
+ k: int = Query(10, ge=1, le=100),
+ space_key: str | None = None,
+ ):
+ """Return k nearest neighbors for a given sample."""
+ try:
+ similar = ds.find_similar(
+ sample_id, k=k, space_key=space_key
+ )
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+ except KeyError:
+ raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
+
+ results = []
+ for sample, distance in similar:
+ try:
+ thumbnail = sample.get_thumbnail_base64()
+ except Exception:
+ thumbnail = None
+
+ results.append(
+ SimilarSampleResponse(
+ id=sample.id,
+ filepath=sample.filepath,
+ filename=sample.filename,
+ label=sample.label,
+ thumbnail=thumbnail,
+ distance=distance,
+ metadata=sample.metadata,
+ )
+ )
+
+ return SimilaritySearchResponse(
+ query_id=sample_id,
+ k=k,
+ results=results,
+ )
+
@app.get("/api/thumbnail/{sample_id}")
- async def get_thumbnail(sample_id: str):
+ async def get_thumbnail(sample_id: str, ds: Dataset = Depends(get_dataset)):
"""Get thumbnail image for a sample."""
- if _current_dataset is None:
- raise HTTPException(status_code=404, detail="No dataset loaded")
-
try:
- sample = _current_dataset[sample_id]
+ sample = ds[sample_id]
thumbnail_b64 = sample.get_thumbnail_base64()
return JSONResponse({"thumbnail": thumbnail_b64})
except KeyError:
raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
+ @app.get("/api/video/{sample_id}")
+ async def get_video(sample_id: str, ds: Dataset = Depends(get_dataset)):
+ """Stream an MP4 clip for a sample."""
+ try:
+ sample = ds[sample_id]
+ except KeyError:
+ raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
+
+ video_path = _resolve_video_file(sample)
+ if video_path is None:
+ raise HTTPException(status_code=404, detail=f"No video file found for sample: {sample_id}")
+
+ return FileResponse(str(video_path), media_type="video/mp4", filename=video_path.name)
+
+ @app.get("/api/annotations/{sample_id}")
+ async def get_annotations(sample_id: str, ds: Dataset = Depends(get_dataset)):
+ """Return caption/reasoning and curation scores for a sample."""
+ try:
+ sample = ds[sample_id]
+ except KeyError:
+ raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
+
+ metadata = sample.metadata if isinstance(sample.metadata, dict) else {}
+
+ raw_caption = metadata.get("caption_raw") or metadata.get("first_caption")
+ raw_caption_str = raw_caption if isinstance(raw_caption, str) else None
+
+ reasoning = metadata.get("caption_reasoning") if isinstance(metadata.get("caption_reasoning"), str) else None
+ answer = metadata.get("caption_answer") if isinstance(metadata.get("caption_answer"), str) else None
+
+ if answer is None and raw_caption_str:
+ parsed_reasoning, parsed_answer = _extract_caption_sections(raw_caption_str)
+ reasoning = reasoning or parsed_reasoning
+ answer = parsed_answer
+
+ return {
+ "id": sample.id,
+ "caption": answer,
+ "reasoning": reasoning,
+ "raw_caption": raw_caption_str,
+ "aesthetic_score": metadata.get("aesthetic_score"),
+ "motion_score": metadata.get("motion_score"),
+ "dedup_status": metadata.get("dedup_status"),
+ "dedup_keep": metadata.get("dedup_keep"),
+ "cosine_sim_score": metadata.get("cosine_sim_score") or metadata.get("dedup_cosine_similarity"),
+ "source_video": metadata.get("source_video"),
+ "video_path": metadata.get("video_path") or metadata.get("clip_location"),
+ "span": metadata.get("span") or metadata.get("duration_span"),
+ }
+
+ @app.get("/api/curation/stats")
+ async def get_curation_stats(ds: Dataset = Depends(get_dataset)):
+ """Return dataset-level curation statistics for dashboarding."""
+ samples = ds.samples
+
+ total = len(samples)
+ with_video = 0
+ with_caption = 0
+ dedup_counts: dict[str, int] = {"kept": 0, "removed": 0, "unknown": 0}
+ aesthetic_values: list[float] = []
+ motion_values: list[float] = []
+
+ for sample in samples:
+ metadata = sample.metadata if isinstance(sample.metadata, dict) else {}
+
+ if _sample_has_video(sample):
+ with_video += 1
+
+ if _sample_caption_text(metadata):
+ with_caption += 1
+
+ dedup_status = _metadata_text(metadata, "dedup_status") or "unknown"
+ dedup_counts[dedup_status] = dedup_counts.get(dedup_status, 0) + 1
+
+ aesthetic = _metadata_number(metadata, "aesthetic_score")
+ if aesthetic is not None:
+ aesthetic_values.append(aesthetic)
+
+ motion = _metadata_number(metadata, "motion_score")
+ if motion is not None:
+ motion_values.append(motion)
+
+ return {
+ "total_samples": total,
+ "with_video": with_video,
+ "with_caption": with_caption,
+ "dedup_counts": dedup_counts,
+ "score_summary": {
+ "aesthetic": _score_summary(aesthetic_values),
+ "motion": _score_summary(motion_values),
+ },
+ "aesthetic_histogram": _histogram(aesthetic_values, bins=12, lower=0.0, upper=10.0),
+ "motion_histogram": _histogram(motion_values, bins=12, lower=0.0, upper=10.0),
+ }
+
+ @app.post("/api/curation/filter")
+ async def post_curation_filter(request: CurationFilterRequest, ds: Dataset = Depends(get_dataset)):
+ """Filter samples by curation metadata and return paginated results."""
+ if request.offset < 0:
+ raise HTTPException(status_code=400, detail="offset must be >= 0")
+ if request.limit < 1 or request.limit > 2000:
+ raise HTTPException(status_code=400, detail="limit must be between 1 and 2000")
+
+ if request.dedup_status and request.dedup_status not in {"kept", "removed", "unknown"}:
+ raise HTTPException(
+ status_code=400,
+ detail="dedup_status must be one of: kept, removed, unknown",
+ )
+
+ filtered_ids: list[str] = []
+ for sample in ds.samples:
+ if _matches_curation_filter(sample, request):
+ filtered_ids.append(sample.id)
+
+ total = len(filtered_ids)
+ start = int(request.offset)
+ end = int(request.offset + request.limit)
+ page_ids = filtered_ids[start:end]
+ page_samples = ds.get_samples_by_ids(page_ids)
+
+ return {
+ "total": total,
+ "offset": request.offset,
+ "limit": request.limit,
+ "sample_ids": page_ids,
+ "samples": [s.to_api_dict(include_thumbnail=request.include_thumbnails) for s in page_samples],
+ }
+
# Serve static frontend files
static_dir = Path(__file__).parent / "static"
if static_dir.exists():
diff --git a/src/hyperview/server/static/404.html b/src/hyperview/server/static/404.html
deleted file mode 100644
index 27fc3dd..0000000
--- a/src/hyperview/server/static/404.html
+++ /dev/null
@@ -1 +0,0 @@
-404: This page could not be found. HyperView
404
This page could not be found.
\ No newline at end of file
diff --git a/src/hyperview/server/static/404/index.html b/src/hyperview/server/static/404/index.html
deleted file mode 100644
index 27fc3dd..0000000
--- a/src/hyperview/server/static/404/index.html
+++ /dev/null
@@ -1 +0,0 @@
-404: This page could not be found. HyperView
404
This page could not be found.
\ No newline at end of file
diff --git a/src/hyperview/server/static/__next.__PAGE__.txt b/src/hyperview/server/static/__next.__PAGE__.txt
deleted file mode 100644
index 61e8c5a..0000000
--- a/src/hyperview/server/static/__next.__PAGE__.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-1:"$Sreact.fragment"
-2:I[47257,["/_next/static/chunks/42879de7b8087bc9.js"],"ClientPageRoot"]
-3:I[52683,["/_next/static/chunks/640b68f22e2796e6.js","/_next/static/chunks/71c75872eed19356.js"],"default"]
-6:I[97367,["/_next/static/chunks/42879de7b8087bc9.js"],"OutletBoundary"]
-7:"$Sreact.suspense"
-0:{"buildId":"lAwT6e3uaMqLUXxndbn_f","rsc":["$","$1","c",{"children":[["$","$L2",null,{"Component":"$3","serverProvidedParams":{"searchParams":{},"params":{},"promises":["$@4","$@5"]}}],[["$","script","script-0",{"src":"/_next/static/chunks/640b68f22e2796e6.js","async":true}],["$","script","script-1",{"src":"/_next/static/chunks/71c75872eed19356.js","async":true}]],["$","$L6",null,{"children":["$","$7",null,{"name":"Next.MetadataOutlet","children":"$@8"}]}]]}],"loading":null,"isPartial":false}
-4:{}
-5:"$0:rsc:props:children:0:props:serverProvidedParams:params"
-8:null
diff --git a/src/hyperview/server/static/__next._full.txt b/src/hyperview/server/static/__next._full.txt
deleted file mode 100644
index f4d4f19..0000000
--- a/src/hyperview/server/static/__next._full.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-1:"$Sreact.fragment"
-2:I[39756,["/_next/static/chunks/42879de7b8087bc9.js"],"default"]
-3:I[37457,["/_next/static/chunks/42879de7b8087bc9.js"],"default"]
-4:I[47257,["/_next/static/chunks/42879de7b8087bc9.js"],"ClientPageRoot"]
-5:I[52683,["/_next/static/chunks/640b68f22e2796e6.js","/_next/static/chunks/71c75872eed19356.js"],"default"]
-8:I[97367,["/_next/static/chunks/42879de7b8087bc9.js"],"OutletBoundary"]
-9:"$Sreact.suspense"
-b:I[97367,["/_next/static/chunks/42879de7b8087bc9.js"],"ViewportBoundary"]
-d:I[97367,["/_next/static/chunks/42879de7b8087bc9.js"],"MetadataBoundary"]
-f:I[68027,["/_next/static/chunks/42879de7b8087bc9.js"],"default"]
-:HL["/_next/static/chunks/19d0d66700a996ca.css","style"]
-0:{"P":null,"b":"lAwT6e3uaMqLUXxndbn_f","c":["",""],"q":"","i":false,"f":[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],[["$","$1","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/chunks/19d0d66700a996ca.css","precedence":"next","crossOrigin":"$undefined","nonce":"$undefined"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"antialiased","children":["$","$L2",null,{"parallelRouterKey":"children","error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L3",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":404}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be 
found."}]}]]}]}]],[]],"forbidden":"$undefined","unauthorized":"$undefined"}]}]}]]}],{"children":[["$","$1","c",{"children":[["$","$L4",null,{"Component":"$5","serverProvidedParams":{"searchParams":{},"params":{},"promises":["$@6","$@7"]}}],[["$","script","script-0",{"src":"/_next/static/chunks/640b68f22e2796e6.js","async":true,"nonce":"$undefined"}],["$","script","script-1",{"src":"/_next/static/chunks/71c75872eed19356.js","async":true,"nonce":"$undefined"}]],["$","$L8",null,{"children":["$","$9",null,{"name":"Next.MetadataOutlet","children":"$@a"}]}]]}],{},null,false,false]},null,false,false],["$","$1","h",{"children":[null,["$","$Lb",null,{"children":"$@c"}],["$","div",null,{"hidden":true,"children":["$","$Ld",null,{"children":["$","$9",null,{"name":"Next.Metadata","children":"$@e"}]}]}],null]}],false]],"m":"$undefined","G":["$f",[]],"S":true}
-6:{}
-7:"$0:f:0:1:1:children:0:props:children:0:props:serverProvidedParams:params"
-c:[["$","meta","0",{"charSet":"utf-8"}],["$","meta","1",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]
-e:[["$","title","0",{"children":"HyperView"}],["$","meta","1",{"name":"description","content":"Dataset visualization with hyperbolic embeddings"}]]
-a:null
diff --git a/src/hyperview/server/static/__next._head.txt b/src/hyperview/server/static/__next._head.txt
deleted file mode 100644
index 6739360..0000000
--- a/src/hyperview/server/static/__next._head.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-1:"$Sreact.fragment"
-2:I[97367,["/_next/static/chunks/42879de7b8087bc9.js"],"ViewportBoundary"]
-4:I[97367,["/_next/static/chunks/42879de7b8087bc9.js"],"MetadataBoundary"]
-5:"$Sreact.suspense"
-0:{"buildId":"lAwT6e3uaMqLUXxndbn_f","rsc":["$","$1","h",{"children":[null,["$","$L2",null,{"children":"$@3"}],["$","div",null,{"hidden":true,"children":["$","$L4",null,{"children":["$","$5",null,{"name":"Next.Metadata","children":"$@6"}]}]}],null]}],"loading":null,"isPartial":false}
-3:[["$","meta","0",{"charSet":"utf-8"}],["$","meta","1",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]
-6:[["$","title","0",{"children":"HyperView"}],["$","meta","1",{"name":"description","content":"Dataset visualization with hyperbolic embeddings"}]]
diff --git a/src/hyperview/server/static/__next._index.txt b/src/hyperview/server/static/__next._index.txt
deleted file mode 100644
index 792a2d1..0000000
--- a/src/hyperview/server/static/__next._index.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-1:"$Sreact.fragment"
-2:I[39756,["/_next/static/chunks/42879de7b8087bc9.js"],"default"]
-3:I[37457,["/_next/static/chunks/42879de7b8087bc9.js"],"default"]
-:HL["/_next/static/chunks/19d0d66700a996ca.css","style"]
-0:{"buildId":"lAwT6e3uaMqLUXxndbn_f","rsc":["$","$1","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/chunks/19d0d66700a996ca.css","precedence":"next"}]],["$","html",null,{"lang":"en","children":["$","body",null,{"className":"antialiased","children":["$","$L2",null,{"parallelRouterKey":"children","template":["$","$L3",null,{}],"notFound":[[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":404}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],[]]}]}]}]]}],"loading":null,"isPartial":false}
diff --git a/src/hyperview/server/static/__next._tree.txt b/src/hyperview/server/static/__next._tree.txt
deleted file mode 100644
index b551f3c..0000000
--- a/src/hyperview/server/static/__next._tree.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-:HL["/_next/static/chunks/19d0d66700a996ca.css","style"]
-0:{"buildId":"lAwT6e3uaMqLUXxndbn_f","tree":{"name":"","paramType":null,"paramKey":"","hasRuntimePrefetch":false,"slots":{"children":{"name":"__PAGE__","paramType":null,"paramKey":"__PAGE__","hasRuntimePrefetch":false,"slots":null,"isRootLayout":false}},"isRootLayout":true},"staleTime":300}
diff --git a/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_buildManifest.js b/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_buildManifest.js
deleted file mode 100644
index 1af6a5b..0000000
--- a/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_buildManifest.js
+++ /dev/null
@@ -1,15 +0,0 @@
-self.__BUILD_MANIFEST = {
- "__rewrites": {
- "afterFiles": [
- {
- "source": "/api/:path*"
- }
- ],
- "beforeFiles": [],
- "fallback": []
- },
- "sortedPages": [
- "/_app",
- "/_error"
- ]
-};self.__BUILD_MANIFEST_CB && self.__BUILD_MANIFEST_CB()
\ No newline at end of file
diff --git a/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_clientMiddlewareManifest.json b/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_clientMiddlewareManifest.json
deleted file mode 100644
index 0637a08..0000000
--- a/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_clientMiddlewareManifest.json
+++ /dev/null
@@ -1 +0,0 @@
-[]
\ No newline at end of file
diff --git a/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_ssgManifest.js b/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_ssgManifest.js
deleted file mode 100644
index 5b3ff59..0000000
--- a/src/hyperview/server/static/_next/static/bTv_hKoV745UqFKtrs1rd/_ssgManifest.js
+++ /dev/null
@@ -1 +0,0 @@
-self.__SSG_MANIFEST=new Set([]);self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB()
\ No newline at end of file
diff --git a/src/hyperview/server/static/_next/static/chunks/19d0d66700a996ca.css b/src/hyperview/server/static/_next/static/chunks/19d0d66700a996ca.css
deleted file mode 100644
index 4f6fdf1..0000000
--- a/src/hyperview/server/static/_next/static/chunks/19d0d66700a996ca.css
+++ /dev/null
@@ -1 +0,0 @@
-*,:before,:after,::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:#3b82f680;--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }*,:before,:after{box-sizing:border-box;border:0 solid #e5e7eb}:before,:after{--tw-content:""}html,:host{-webkit-text-size-adjust:100%;tab-size:4;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent;font-family:ui-sans-serif,system-ui,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;line-height:1.5}body{line-height:inherit;margin:0}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-feature-settings:normal;font-variation-settings:normal;font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier 
New,monospace;font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-feature-settings:inherit;font-variation-settings:inherit;font-family:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:#0000;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dl,dd,h1,h2,h3,h4,h5,h6,hr,figure,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}ol,ul,menu{margin:0;padding:0;list-style:none}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder{opacity:1;color:#9ca3af}textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button,[role=button]{cursor:pointer}:disabled{cursor:default}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}.\!container{width:100%!important}.container{width:100%}@media (min-width:640px){.\!container{max-width:640px!important}.container{max-width:640px}}@media (min-width:768px){.\!container{max-width:768px!important}.container{max-width:768px}}@media (min-width:1024px){.\!container{max-width:1024px!important}.container{max-width:1024px}}@media 
(min-width:1280px){.\!container{max-width:1280px!important}.container{max-width:1280px}}@media (min-width:1536px){.\!container{max-width:1536px!important}.container{max-width:1536px}}.pointer-events-none{pointer-events:none}.absolute{position:absolute}.relative{position:relative}.inset-0{inset:0}.bottom-1{bottom:.25rem}.left-1{left:.25rem}.right-1{right:.25rem}.top-1{top:.25rem}.z-10{z-index:10}.mx-auto{margin-left:auto;margin-right:auto}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.mt-4{margin-top:1rem}.inline-block{display:inline-block}.flex{display:flex}.h-14{height:3.5rem}.h-3{height:.75rem}.h-5{height:1.25rem}.h-8{height:2rem}.h-full{height:100%}.h-screen{height:100vh}.w-1\/2{width:50%}.w-3{width:.75rem}.w-36{width:9rem}.w-5{width:1.25rem}.w-8{width:2rem}.w-full{width:100%}.min-w-0{min-width:0}.max-w-full{max-width:100%}.flex-1{flex:1}.flex-shrink-0{flex-shrink:0}.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y))rotate(var(--tw-rotate))skewX(var(--tw-skew-x))skewY(var(--tw-skew-y))scaleX(var(--tw-scale-x))scaleY(var(--tw-scale-y))}@keyframes spin{to{transform:rotate(360deg)}}.animate-spin{animation:1s linear infinite spin}.cursor-pointer{cursor:pointer}.resize{resize:both}.flex-col{flex-direction:column}.items-center{align-items:center}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-2{gap:.5rem}.gap-3{gap:.75rem}.gap-4{gap:1rem}.space-y-1>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.25rem*calc(1 - 
var(--tw-space-y-reverse)));margin-bottom:calc(.25rem*var(--tw-space-y-reverse))}.overflow-auto{overflow:auto}.overflow-hidden{overflow:hidden}.overflow-y-auto{overflow-y:auto}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.rounded{border-radius:.25rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:.5rem}.rounded-md{border-radius:.375rem}.border{border-width:1px}.border-2{border-width:2px}.border-b{border-bottom-width:1px}.border-l{border-left-width:1px}.border-t{border-top-width:1px}.border-border{--tw-border-opacity:1;border-color:rgb(63 63 70/var(--tw-border-opacity,1))}.border-primary{--tw-border-opacity:1;border-color:rgb(79 70 229/var(--tw-border-opacity,1))}.border-t-transparent{border-top-color:#0000}.bg-background{--tw-bg-opacity:1;background-color:rgb(10 10 11/var(--tw-bg-opacity,1))}.bg-primary{--tw-bg-opacity:1;background-color:rgb(79 70 229/var(--tw-bg-opacity,1))}.bg-surface{--tw-bg-opacity:1;background-color:rgb(24 24 27/var(--tw-bg-opacity,1))}.bg-surface-light{--tw-bg-opacity:1;background-color:rgb(39 39 42/var(--tw-bg-opacity,1))}.bg-surface\/80{background-color:#18181bcc}.object-cover{-o-object-fit:cover;object-fit:cover}.p-2{padding:.5rem}.px-1{padding-left:.25rem;padding-right:.25rem}.px-1\.5{padding-left:.375rem;padding-right:.375rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-4{padding-left:1rem;padding-right:1rem}.py-0\.5{padding-top:.125rem;padding-bottom:.125rem}.py-1{padding-top:.25rem;padding-bottom:.25rem}.py-1\.5{padding-top:.375rem;padding-bottom:.375rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.text-center{text-align:center}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xs{font-size:.75rem;line-height:1rem}.font-medium{font-weight:500}.font-semibold{font-weight:600}.text-red-500{--tw-text-opacity:1;color:rgb(239 68 68/var(--tw-text-opacity,1))}.text-text{--tw-text-opacity:1;color:rgb(250 
250 250/var(--tw-text-opacity,1))}.text-text-muted{--tw-text-opacity:1;color:rgb(161 161 170/var(--tw-text-opacity,1))}.text-white{--tw-text-opacity:1;color:rgb(255 255 255/var(--tw-text-opacity,1))}.antialiased{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.opacity-70{opacity:.7}.ring-2{--tw-ring-offset-shadow:var(--tw-ring-inset)0 0 0 var(--tw-ring-offset-width)var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset)0 0 0 calc(2px + var(--tw-ring-offset-width))var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.ring-primary{--tw-ring-opacity:1;--tw-ring-color:rgb(79 70 229/var(--tw-ring-opacity,1))}.ring-primary-light{--tw-ring-opacity:1;--tw-ring-color:rgb(129 140 248/var(--tw-ring-opacity,1))}.filter{filter:var(--tw-blur)var(--tw-brightness)var(--tw-contrast)var(--tw-grayscale)var(--tw-hue-rotate)var(--tw-invert)var(--tw-saturate)var(--tw-sepia)var(--tw-drop-shadow)}.transition-all{transition-property:all;transition-duration:.15s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.transition-colors{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-duration:.15s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.duration-150{transition-duration:.15s}.ease-out{transition-timing-function:cubic-bezier(0,0,.2,1)}:root{--background:#0a0a0b;--surface:#18181b;--surface-light:#27272a;--border:#3f3f46;--primary:#4f46e5;--primary-light:#818cf8;--text:#fafafa;--text-muted:#a1a1aa}*{box-sizing:border-box}body{background-color:var(--background);color:var(--text);-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica 
Neue,sans-serif}::-webkit-scrollbar{width:8px;height:8px}::-webkit-scrollbar-track{background:var(--surface)}::-webkit-scrollbar-thumb{background:var(--border);border-radius:4px}::-webkit-scrollbar-thumb:hover{background:#4d4d4d}.hide-scrollbar::-webkit-scrollbar{display:none}.hide-scrollbar{-ms-overflow-style:none;scrollbar-width:none}.hover\:bg-border:hover{--tw-bg-opacity:1;background-color:rgb(63 63 70/var(--tw-bg-opacity,1))}.hover\:bg-surface-light:hover{--tw-bg-opacity:1;background-color:rgb(39 39 42/var(--tw-bg-opacity,1))}
diff --git a/src/hyperview/server/static/_next/static/chunks/42879de7b8087bc9.js b/src/hyperview/server/static/_next/static/chunks/42879de7b8087bc9.js
deleted file mode 100644
index bda8a81..0000000
--- a/src/hyperview/server/static/_next/static/chunks/42879de7b8087bc9.js
+++ /dev/null
@@ -1 +0,0 @@
-(globalThis.TURBOPACK||(globalThis.TURBOPACK=[])).push(["object"==typeof document?document.currentScript:void 0,33525,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"warnOnce",{enumerable:!0,get:function(){return n}});let n=e=>{}},91915,(e,r,t)=>{"use strict";function n(e,r={}){if(r.onlyHashChange)return void e();let t=document.documentElement;if("smooth"!==t.dataset.scrollBehavior)return void e();let a=t.style.scrollBehavior;t.style.scrollBehavior="auto",r.dontForceLayout||t.getClientRects(),e(),t.style.scrollBehavior=a}Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"disableSmoothScrollDuringRouteTransition",{enumerable:!0,get:function(){return n}}),e.r(33525)},68017,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"HTTPAccessFallbackBoundary",{enumerable:!0,get:function(){return i}});let n=e.r(90809),a=e.r(43476),o=n._(e.r(71645)),c=e.r(90373),u=e.r(54394);e.r(33525);let l=e.r(8372);class s extends o.default.Component{constructor(e){super(e),this.state={triggeredStatus:void 0,previousPathname:e.pathname}}componentDidCatch(){}static getDerivedStateFromError(e){if((0,u.isHTTPAccessFallbackError)(e))return{triggeredStatus:(0,u.getAccessFallbackHTTPStatus)(e)};throw e}static getDerivedStateFromProps(e,r){return e.pathname!==r.previousPathname&&r.triggeredStatus?{triggeredStatus:void 0,previousPathname:e.pathname}:{triggeredStatus:r.triggeredStatus,previousPathname:e.pathname}}render(){let{notFound:e,forbidden:r,unauthorized:t,children:n}=this.props,{triggeredStatus:o}=this.state,c={[u.HTTPAccessErrorStatus.NOT_FOUND]:e,[u.HTTPAccessErrorStatus.FORBIDDEN]:r,[u.HTTPAccessErrorStatus.UNAUTHORIZED]:t};if(o){let l=o===u.HTTPAccessErrorStatus.NOT_FOUND&&e,s=o===u.HTTPAccessErrorStatus.FORBIDDEN&&r,i=o===u.HTTPAccessErrorStatus.UNAUTHORIZED&&t;return 
l||s||i?(0,a.jsxs)(a.Fragment,{children:[(0,a.jsx)("meta",{name:"robots",content:"noindex"}),!1,c[o]]}):n}return n}}function i({notFound:e,forbidden:r,unauthorized:t,children:n}){let u=(0,c.useUntrackedPathname)(),i=(0,o.useContext)(l.MissingSlotContext);return e||r||t?(0,a.jsx)(s,{pathname:u,notFound:e,forbidden:r,unauthorized:t,missingSlots:i,children:n}):(0,a.jsx)(a.Fragment,{children:n})}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},91798,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"useRouterBFCache",{enumerable:!0,get:function(){return a}});let n=e.r(71645);function a(e,r){let[t,a]=(0,n.useState)(()=>({tree:e,stateKey:r,next:null}));if(t.tree===e)return t;let o={tree:e,stateKey:r,next:null},c=1,u=t,l=o;for(;null!==u&&c<1;){if(u.stateKey===r){l.next=u.next;break}{c++;let e={tree:u.tree,stateKey:u.stateKey,next:null};l.next=e,l=e}u=u.next}return a(o),o}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},39756,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"default",{enumerable:!0,get:function(){return T}});let n=e.r(55682),a=e.r(90809),o=e.r(43476),c=e.r(88540),u=a._(e.r(71645)),l=n._(e.r(74080)),s=e.r(8372),i=e.r(87288),d=e.r(1244),f=e.r(72383),p=e.r(56019),h=e.r(91915),m=e.r(58442),g=e.r(68017),y=e.r(70725),b=e.r(84356),P=e.r(41538),_=e.r(91798);e.r(74180);let v=e.r(61994),O=e.r(33906),S=l.default.__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE,E=["bottom","height","left","right","top","width","x","y"];function R(e,r){let t=e.getBoundingClientRect();return t.top>=0&&t.top<=r}class j extends 
u.default.Component{componentDidMount(){this.handlePotentialScroll()}componentDidUpdate(){this.props.focusAndScrollRef.apply&&this.handlePotentialScroll()}render(){return this.props.children}constructor(...e){super(...e),this.handlePotentialScroll=()=>{let{focusAndScrollRef:e,segmentPath:r}=this.props;if(e.apply){if(0!==e.segmentPaths.length&&!e.segmentPaths.some(e=>r.every((r,t)=>(0,p.matchSegment)(r,e[t]))))return;let t=null,n=e.hashFragment;if(n&&(t="top"===n?document.body:document.getElementById(n)??document.getElementsByName(n)[0]),t||(t="undefined"==typeof window?null:(0,S.findDOMNode)(this)),!(t instanceof Element))return;for(;!(t instanceof HTMLElement)||function(e){if(["sticky","fixed"].includes(getComputedStyle(e).position))return!0;let r=e.getBoundingClientRect();return E.every(e=>0===r[e])}(t);){if(null===t.nextElementSibling)return;t=t.nextElementSibling}e.apply=!1,e.hashFragment=null,e.segmentPaths=[],(0,h.disableSmoothScrollDuringRouteTransition)(()=>{if(n)return void t.scrollIntoView();let e=document.documentElement,r=e.clientHeight;!R(t,r)&&(e.scrollTop=0,R(t,r)||t.scrollIntoView())},{dontForceLayout:!0,onlyHashChange:e.onlyHashChange}),e.onlyHashChange=!1,t.focus()}}}}function w({segmentPath:e,children:r}){let t=(0,u.useContext)(s.GlobalLayoutRouterContext);if(!t)throw Object.defineProperty(Error("invariant global layout router not mounted"),"__NEXT_ERROR_CODE",{value:"E473",enumerable:!1,configurable:!0});return(0,o.jsx)(j,{segmentPath:e,focusAndScrollRef:t.focusAndScrollRef,children:r})}function C({tree:e,segmentPath:r,debugNameContext:t,cacheNode:n,params:a,url:l,isActive:f}){let h=(0,u.useContext)(s.GlobalLayoutRouterContext);if((0,u.useContext)(v.NavigationPromisesContext),!h)throw Object.defineProperty(Error("invariant global layout router not mounted"),"__NEXT_ERROR_CODE",{value:"E473",enumerable:!1,configurable:!0});let{tree:m}=h,g=null!==n.prefetchRsc?n.prefetchRsc:n.rsc,y=(0,u.useDeferredValue)(n.rsc,g),_="object"==typeof 
y&&null!==y&&"function"==typeof y.then?(0,u.use)(y):y;if(!_){if(f){let e=n.lazyData;if(null===e){let t=function e(r,t){if(r){let[n,a]=r,o=2===r.length;if((0,p.matchSegment)(t[0],n)&&t[1].hasOwnProperty(a)){if(o){let r=e(void 0,t[1][a]);return[t[0],{...t[1],[a]:[r[0],r[1],r[2],"refetch"]}]}return[t[0],{...t[1],[a]:e(r.slice(2),t[1][a])}]}}return t}(["",...r],m),a=(0,b.hasInterceptionRouteInCurrentTree)(m),o=Date.now();n.lazyData=e=(0,i.fetchServerResponse)(new URL(l,location.origin),{flightRouterState:t,nextUrl:a?h.previousNextUrl||h.nextUrl:null}).then(e=>((0,u.startTransition)(()=>{(0,P.dispatchAppRouterAction)({type:c.ACTION_SERVER_PATCH,previousTree:m,serverResponse:e,navigatedAt:o})}),e)),(0,u.use)(e)}}(0,u.use)(d.unresolvedThenable)}return(0,o.jsx)(s.LayoutRouterContext.Provider,{value:{parentTree:e,parentCacheNode:n,parentSegmentPath:r,parentParams:a,debugNameContext:t,url:l,isActive:f},children:_})}function x({name:e,loading:r,children:t}){let n;if(n="object"==typeof r&&null!==r&&"function"==typeof r.then?(0,u.use)(r):r){let r=n[0],a=n[1],c=n[2];return(0,o.jsx)(u.Suspense,{name:e,fallback:(0,o.jsxs)(o.Fragment,{children:[a,c,r]}),children:t})}return(0,o.jsx)(o.Fragment,{children:t})}function T({parallelRouterKey:e,error:r,errorStyles:t,errorScripts:n,templateStyles:a,templateScripts:c,template:l,notFound:i,forbidden:d,unauthorized:p,segmentViewBoundaries:h}){let b=(0,u.useContext)(s.LayoutRouterContext);if(!b)throw Object.defineProperty(Error("invariant expected layout router to be mounted"),"__NEXT_ERROR_CODE",{value:"E56",enumerable:!1,configurable:!0});let{parentTree:P,parentCacheNode:v,parentSegmentPath:S,parentParams:E,url:R,isActive:j,debugNameContext:T}=b,A=v.parallelRoutes,M=A.get(e);M||(M=new Map,A.set(e,M));let F=P[0],D=null===S?[e]:S.concat([F,e]),k=P[1][e],N=k[0],I=(0,y.createRouterCacheKey)(N,!0),U=(0,_.useRouterBFCache)(k,I),H=[];do{let e=U.tree,u=U.stateKey,h=e[0],b=(0,y.createRouterCacheKey)(h),P=M.get(b);if(void 0===P){let 
e={lazyData:null,rsc:null,prefetchRsc:null,head:null,prefetchHead:null,parallelRoutes:new Map,loading:null,navigatedAt:-1};P=e,M.set(b,e)}let _=E;if(Array.isArray(h)){let e=h[0],r=h[1],t=h[2],n=(0,O.getParamValueFromCacheKey)(r,t);null!==n&&(_={...E,[e]:n})}let S=function(e){if("/"===e)return"/";if("string"==typeof e)if("(slot)"===e)return;else return e+"/";return e[1]+"/"}(h),A=S??T,F=void 0===S?void 0:T,k=v.loading,N=(0,o.jsxs)(s.TemplateContext.Provider,{value:(0,o.jsxs)(w,{segmentPath:D,children:[(0,o.jsx)(f.ErrorBoundary,{errorComponent:r,errorStyles:t,errorScripts:n,children:(0,o.jsx)(x,{name:F,loading:k,children:(0,o.jsx)(g.HTTPAccessFallbackBoundary,{notFound:i,forbidden:d,unauthorized:p,children:(0,o.jsxs)(m.RedirectBoundary,{children:[(0,o.jsx)(C,{url:R,tree:e,params:_,cacheNode:P,segmentPath:D,debugNameContext:A,isActive:j&&u===I}),null]})})})}),null]}),children:[a,c,l]},u);H.push(N),U=U.next}while(null!==U)return H}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},37457,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"default",{enumerable:!0,get:function(){return u}});let n=e.r(90809),a=e.r(43476),o=n._(e.r(71645)),c=e.r(8372);function u(){let e=(0,o.useContext)(c.TemplateContext);return(0,a.jsx)(a.Fragment,{children:e})}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},93504,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"createRenderSearchParamsFromClient",{enumerable:!0,get:function(){return a}});let n=new WeakMap;function a(e){let r=n.get(e);if(r)return r;let t=Promise.resolve(e);return n.set(e,t),t}("function"==typeof 
t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},66996,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"createRenderSearchParamsFromClient",{enumerable:!0,get:function(){return n}});let n=e.r(93504).createRenderSearchParamsFromClient;("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},6831,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"createRenderParamsFromClient",{enumerable:!0,get:function(){return a}});let n=new WeakMap;function a(e){let r=n.get(e);if(r)return r;let t=Promise.resolve(e);return n.set(e,t),t}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},97689,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"createRenderParamsFromClient",{enumerable:!0,get:function(){return n}});let n=e.r(6831).createRenderParamsFromClient;("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},42715,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"ReflectAdapter",{enumerable:!0,get:function(){return n}});class n{static get(e,r,t){let n=Reflect.get(e,r,t);return"function"==typeof n?n.bind(e):n}static set(e,r,t,n){return Reflect.set(e,r,t,n)}static has(e,r){return Reflect.has(e,r)}static deleteProperty(e,r){return 
Reflect.deleteProperty(e,r)}}},76361,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"createDedupedByCallsiteServerErrorLoggerDev",{enumerable:!0,get:function(){return l}});let n=function(e,r){if(e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var t=a(void 0);if(t&&t.has(e))return t.get(e);var n={__proto__:null},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in e)if("default"!==c&&Object.prototype.hasOwnProperty.call(e,c)){var u=o?Object.getOwnPropertyDescriptor(e,c):null;u&&(u.get||u.set)?Object.defineProperty(n,c,u):n[c]=e[c]}return n.default=e,t&&t.set(e,n),n}(e.r(71645));function a(e){if("function"!=typeof WeakMap)return null;var r=new WeakMap,t=new WeakMap;return(a=function(e){return e?t:r})(e)}let o={current:null},c="function"==typeof n.cache?n.cache:e=>e,u=console.warn;function l(e){return function(...r){u(e(...r))}}c(e=>{try{u(o.current)}finally{o.current=null}})},65932,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n={describeHasCheckingStringProperty:function(){return u},describeStringPropertyAccess:function(){return c},wellKnownProperties:function(){return l}};for(var a in n)Object.defineProperty(t,a,{enumerable:!0,get:n[a]});let o=/^[A-Za-z_$][A-Za-z0-9_$]*$/;function c(e,r){return o.test(r)?`\`${e}.${r}\``:`\`${e}[${JSON.stringify(r)}]\``}function u(e,r){let t=JSON.stringify(r);return`\`Reflect.has(${e}, ${t})\`, \`${t} in ${e}\`, or similar`}let l=new Set(["hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toString","valueOf","toLocaleString","then","catch","finally","status","displayName","_debugInfo","toJSON","$$typeof","__esModule"])},83066,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"afterTaskAsyncStorageInstance",{enumerable:!0,get:function(){return n}});let n=(0,e.r(90317).createAsyncLocalStorage)()},41643,(e,r,t)=>{"use 
strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"afterTaskAsyncStorage",{enumerable:!0,get:function(){return n.afterTaskAsyncStorageInstance}});let n=e.r(83066)},50999,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n={isRequestAPICallableInsideAfter:function(){return s},throwForSearchParamsAccessInUseCache:function(){return l},throwWithStaticGenerationBailoutErrorWithDynamicError:function(){return u}};for(var a in n)Object.defineProperty(t,a,{enumerable:!0,get:n[a]});let o=e.r(43248),c=e.r(41643);function u(e,r){throw Object.defineProperty(new o.StaticGenBailoutError(`Route ${e} with \`dynamic = "error"\` couldn't be rendered statically because it used ${r}. See more info here: https://nextjs.org/docs/app/building-your-application/rendering/static-and-dynamic#dynamic-rendering`),"__NEXT_ERROR_CODE",{value:"E543",enumerable:!1,configurable:!0})}function l(e,r){let t=Object.defineProperty(Error(`Route ${e.route} used \`searchParams\` inside "use cache". Accessing dynamic request data inside a cache scope is not supported. If you need some search params inside a cached function await \`searchParams\` outside of the cached function and pass only the required search params as arguments to the cached function. 
See more info here: https://nextjs.org/docs/messages/next-request-in-use-cache`),"__NEXT_ERROR_CODE",{value:"E842",enumerable:!1,configurable:!0});throw Error.captureStackTrace(t,r),e.invalidDynamicUsageError??=t,t}function s(){let e=c.afterTaskAsyncStorage.getStore();return(null==e?void 0:e.rootTaskSpawnPhase)==="action"}},69882,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n={createPrerenderSearchParamsForClientPage:function(){return g},createSearchParamsFromClient:function(){return p},createServerSearchParamsForMetadata:function(){return h},createServerSearchParamsForServerPage:function(){return m},makeErroringSearchParamsForUseCache:function(){return v}};for(var a in n)Object.defineProperty(t,a,{enumerable:!0,get:n[a]});let o=e.r(42715),c=e.r(67673),u=e.r(62141),l=e.r(12718),s=e.r(63138),i=e.r(76361),d=e.r(65932),f=e.r(50999);function p(e,r){let t=u.workUnitAsyncStorage.getStore();if(t)switch(t.type){case"prerender":case"prerender-client":case"prerender-ppr":case"prerender-legacy":return y(r,t);case"prerender-runtime":throw Object.defineProperty(new l.InvariantError("createSearchParamsFromClient should not be called in a runtime prerender."),"__NEXT_ERROR_CODE",{value:"E769",enumerable:!1,configurable:!0});case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new l.InvariantError("createSearchParamsFromClient should not be called in cache contexts."),"__NEXT_ERROR_CODE",{value:"E739",enumerable:!1,configurable:!0});case"request":return b(e,r,t)}(0,u.throwInvariantForMissingStore)()}e.r(42852);let h=m;function m(e,r){let t=u.workUnitAsyncStorage.getStore();if(t)switch(t.type){case"prerender":case"prerender-client":case"prerender-ppr":case"prerender-legacy":return y(r,t);case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new l.InvariantError("createServerSearchParamsForServerPage should not be called in cache 
contexts."),"__NEXT_ERROR_CODE",{value:"E747",enumerable:!1,configurable:!0});case"prerender-runtime":var n,a;return n=e,a=t,(0,c.delayUntilRuntimeStage)(a,O(n));case"request":return b(e,r,t)}(0,u.throwInvariantForMissingStore)()}function g(e){if(e.forceStatic)return Promise.resolve({});let r=u.workUnitAsyncStorage.getStore();if(r)switch(r.type){case"prerender":case"prerender-client":return(0,s.makeHangingPromise)(r.renderSignal,e.route,"`searchParams`");case"prerender-runtime":throw Object.defineProperty(new l.InvariantError("createPrerenderSearchParamsForClientPage should not be called in a runtime prerender."),"__NEXT_ERROR_CODE",{value:"E768",enumerable:!1,configurable:!0});case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new l.InvariantError("createPrerenderSearchParamsForClientPage should not be called in cache contexts."),"__NEXT_ERROR_CODE",{value:"E746",enumerable:!1,configurable:!0});case"prerender-ppr":case"prerender-legacy":case"request":return Promise.resolve({})}(0,u.throwInvariantForMissingStore)()}function y(e,r){if(e.forceStatic)return Promise.resolve({});switch(r.type){case"prerender":case"prerender-client":var t=e,n=r;let a=P.get(n);if(a)return a;let u=(0,s.makeHangingPromise)(n.renderSignal,t.route,"`searchParams`"),l=new Proxy(u,{get(e,r,t){if(Object.hasOwn(u,r))return o.ReflectAdapter.get(e,r,t);switch(r){case"then":return(0,c.annotateDynamicAccess)("`await searchParams`, `searchParams.then`, or similar",n),o.ReflectAdapter.get(e,r,t);case"status":return(0,c.annotateDynamicAccess)("`use(searchParams)`, `searchParams.status`, or similar",n),o.ReflectAdapter.get(e,r,t);default:return o.ReflectAdapter.get(e,r,t)}}});return P.set(n,l),l;case"prerender-ppr":case"prerender-legacy":var i=e,d=r;let p=P.get(i);if(p)return p;let h=Promise.resolve({}),m=new Proxy(h,{get(e,r,t){if(Object.hasOwn(h,r))return o.ReflectAdapter.get(e,r,t);if("string"==typeof r&&"then"===r){let e="`await searchParams`, `searchParams.then`, or 
similar";i.dynamicShouldError?(0,f.throwWithStaticGenerationBailoutErrorWithDynamicError)(i.route,e):"prerender-ppr"===d.type?(0,c.postponeWithTracking)(i.route,e,d.dynamicTracking):(0,c.throwToInterruptStaticGeneration)(e,i,d)}return o.ReflectAdapter.get(e,r,t)}});return P.set(i,m),m;default:return r}}function b(e,r,t){return r.forceStatic?Promise.resolve({}):O(e)}let P=new WeakMap,_=new WeakMap;function v(e){let r=_.get(e);if(r)return r;let t=Promise.resolve({}),n=new Proxy(t,{get:function r(n,a,c){return Object.hasOwn(t,a)||"string"!=typeof a||"then"!==a&&d.wellKnownProperties.has(a)||(0,f.throwForSearchParamsAccessInUseCache)(e,r),o.ReflectAdapter.get(n,a,c)}});return _.set(e,n),n}function O(e){let r=P.get(e);if(r)return r;let t=Promise.resolve(e);return P.set(e,t),t}(0,i.createDedupedByCallsiteServerErrorLoggerDev)(function(e,r){let t=e?`Route "${e}" `:"This route ";return Object.defineProperty(Error(`${t}used ${r}. \`searchParams\` is a Promise and must be unwrapped with \`await\` or \`React.use()\` before accessing its properties. 
Learn more: https://nextjs.org/docs/messages/sync-dynamic-apis`),"__NEXT_ERROR_CODE",{value:"E848",enumerable:!1,configurable:!0})})},74804,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"dynamicAccessAsyncStorageInstance",{enumerable:!0,get:function(){return n}});let n=(0,e.r(90317).createAsyncLocalStorage)()},88276,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"dynamicAccessAsyncStorage",{enumerable:!0,get:function(){return n.dynamicAccessAsyncStorageInstance}});let n=e.r(74804)},41489,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0});var n={createParamsFromClient:function(){return h},createPrerenderParamsForClientSegment:function(){return b},createServerParamsForMetadata:function(){return m},createServerParamsForRoute:function(){return g},createServerParamsForServerSegment:function(){return y}};for(var a in n)Object.defineProperty(t,a,{enumerable:!0,get:n[a]});let o=e.r(63599),c=e.r(42715),u=e.r(67673),l=e.r(62141),s=e.r(12718),i=e.r(65932),d=e.r(63138),f=e.r(76361),p=e.r(88276);function h(e,r){let t=l.workUnitAsyncStorage.getStore();if(t)switch(t.type){case"prerender":case"prerender-client":case"prerender-ppr":case"prerender-legacy":return P(e,r,t);case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new s.InvariantError("createParamsFromClient should not be called in cache contexts."),"__NEXT_ERROR_CODE",{value:"E736",enumerable:!1,configurable:!0});case"prerender-runtime":throw Object.defineProperty(new s.InvariantError("createParamsFromClient should not be called in a runtime prerender."),"__NEXT_ERROR_CODE",{value:"E770",enumerable:!1,configurable:!0});case"request":return S(e)}(0,l.throwInvariantForMissingStore)()}e.r(42852);let m=y;function g(e,r){let t=l.workUnitAsyncStorage.getStore();if(t)switch(t.type){case"prerender":case"prerender-client":case"prerender-ppr":case"prerender-legacy":return 
P(e,r,t);case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new s.InvariantError("createServerParamsForRoute should not be called in cache contexts."),"__NEXT_ERROR_CODE",{value:"E738",enumerable:!1,configurable:!0});case"prerender-runtime":return _(e,t);case"request":return S(e)}(0,l.throwInvariantForMissingStore)()}function y(e,r){let t=l.workUnitAsyncStorage.getStore();if(t)switch(t.type){case"prerender":case"prerender-client":case"prerender-ppr":case"prerender-legacy":return P(e,r,t);case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new s.InvariantError("createServerParamsForServerSegment should not be called in cache contexts."),"__NEXT_ERROR_CODE",{value:"E743",enumerable:!1,configurable:!0});case"prerender-runtime":return _(e,t);case"request":return S(e)}(0,l.throwInvariantForMissingStore)()}function b(e){let r=o.workAsyncStorage.getStore();if(!r)throw Object.defineProperty(new s.InvariantError("Missing workStore in createPrerenderParamsForClientSegment"),"__NEXT_ERROR_CODE",{value:"E773",enumerable:!1,configurable:!0});let t=l.workUnitAsyncStorage.getStore();if(t)switch(t.type){case"prerender":case"prerender-client":let n=t.fallbackRouteParams;if(n){for(let a in e)if(n.has(a))return(0,d.makeHangingPromise)(t.renderSignal,r.route,"`params`")}break;case"cache":case"private-cache":case"unstable-cache":throw Object.defineProperty(new s.InvariantError("createPrerenderParamsForClientSegment should not be called in cache contexts."),"__NEXT_ERROR_CODE",{value:"E734",enumerable:!1,configurable:!0})}return Promise.resolve(e)}function P(e,r,t){switch(t.type){case"prerender":case"prerender-client":{let n=t.fallbackRouteParams;if(n){for(let a in e)if(n.has(a))return function(e,r,t){let n=v.get(e);if(n)return n;let a=new Proxy((0,d.makeHangingPromise)(t.renderSignal,r.route,"`params`"),O);return v.set(e,a),a}(e,r,t)}break}case"prerender-ppr":{let n=t.fallbackRouteParams;if(n){for(let a in e)if(n.has(a))return 
function(e,r,t,n){let a=v.get(e);if(a)return a;let o={...e},c=Promise.resolve(o);return v.set(e,c),Object.keys(e).forEach(e=>{i.wellKnownProperties.has(e)||r.has(e)&&Object.defineProperty(o,e,{get(){let r=(0,i.describeStringPropertyAccess)("params",e);"prerender-ppr"===n.type?(0,u.postponeWithTracking)(t.route,r,n.dynamicTracking):(0,u.throwToInterruptStaticGeneration)(r,t,n)},enumerable:!0})}),c}(e,n,r,t)}}}return S(e)}function _(e,r){return(0,u.delayUntilRuntimeStage)(r,S(e))}let v=new WeakMap,O={get:function(e,r,t){if("then"===r||"catch"===r||"finally"===r){let n=c.ReflectAdapter.get(e,r,t);return({[r]:(...r)=>{let t=p.dynamicAccessAsyncStorage.getStore();return t&&t.abortController.abort(Object.defineProperty(Error("Accessed fallback `params` during prerendering."),"__NEXT_ERROR_CODE",{value:"E691",enumerable:!1,configurable:!0})),new Proxy(n.apply(e,r),O)}})[r]}return c.ReflectAdapter.get(e,r,t)}};function S(e){let r=v.get(e);if(r)return r;let t=Promise.resolve(e);return v.set(e,t),t}(0,f.createDedupedByCallsiteServerErrorLoggerDev)(function(e,r){let t=e?`Route "${e}" `:"This route ";return Object.defineProperty(Error(`${t}used ${r}. \`params\` is a Promise and must be unwrapped with \`await\` or \`React.use()\` before accessing its properties. 
Learn more: https://nextjs.org/docs/messages/sync-dynamic-apis`),"__NEXT_ERROR_CODE",{value:"E834",enumerable:!1,configurable:!0})})},47257,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"ClientPageRoot",{enumerable:!0,get:function(){return s}});let n=e.r(43476),a=e.r(12718),o=e.r(8372),c=e.r(71645),u=e.r(33906),l=e.r(61994);function s({Component:r,serverProvidedParams:t}){let s,i;if(null!==t)s=t.searchParams,i=t.params;else{let e=(0,c.use)(o.LayoutRouterContext);i=null!==e?e.parentParams:{},s=(0,u.urlSearchParamsToParsedUrlQuery)((0,c.use)(l.SearchParamsContext))}if("undefined"==typeof window){let t,o,{workAsyncStorage:c}=e.r(63599),u=c.getStore();if(!u)throw Object.defineProperty(new a.InvariantError("Expected workStore to exist when handling searchParams in a client Page."),"__NEXT_ERROR_CODE",{value:"E564",enumerable:!1,configurable:!0});let{createSearchParamsFromClient:l}=e.r(69882);t=l(s,u);let{createParamsFromClient:d}=e.r(41489);return o=d(i,u),(0,n.jsx)(r,{params:o,searchParams:t})}{let{createRenderSearchParamsFromClient:t}=e.r(66996),a=t(s),{createRenderParamsFromClient:o}=e.r(97689),c=o(i);return(0,n.jsx)(r,{params:c,searchParams:a})}}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},92825,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"ClientSegmentRoot",{enumerable:!0,get:function(){return u}});let n=e.r(43476),a=e.r(12718),o=e.r(8372),c=e.r(71645);function u({Component:r,slots:t,serverProvidedParams:u}){let l;if(null!==u)l=u.params;else{let e=(0,c.use)(o.LayoutRouterContext);l=null!==e?e.parentParams:{}}if("undefined"==typeof window){let o,{workAsyncStorage:c}=e.r(63599),u=c.getStore();if(!u)throw Object.defineProperty(new a.InvariantError("Expected workStore to exist when handling params in 
a client segment such as a Layout or Template."),"__NEXT_ERROR_CODE",{value:"E600",enumerable:!1,configurable:!0});let{createParamsFromClient:s}=e.r(41489);return o=s(l,u),(0,n.jsx)(r,{...t,params:o})}{let{createRenderParamsFromClient:a}=e.r(97689),o=a(l);return(0,n.jsx)(r,{...t,params:o})}}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),r.exports=t.default)},27201,(e,r,t)=>{"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"IconMark",{enumerable:!0,get:function(){return a}});let n=e.r(43476),a=()=>"undefined"!=typeof window?null:(0,n.jsx)("meta",{name:"«nxt-icon»"})}]);
\ No newline at end of file
diff --git a/src/hyperview/server/static/_next/static/chunks/5f8e11d2476d2135.js b/src/hyperview/server/static/_next/static/chunks/5f8e11d2476d2135.js
deleted file mode 100644
index b255ea0..0000000
--- a/src/hyperview/server/static/_next/static/chunks/5f8e11d2476d2135.js
+++ /dev/null
@@ -1 +0,0 @@
-(globalThis.TURBOPACK||(globalThis.TURBOPACK=[])).push(["object"==typeof document?document.currentScript:void 0,68027,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"default",{enumerable:!0,get:function(){return s}});let n=e.r(43476),o=e.r(12354),i={fontFamily:'system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji"',height:"100vh",textAlign:"center",display:"flex",flexDirection:"column",alignItems:"center",justifyContent:"center"},u={fontSize:"14px",fontWeight:400,lineHeight:"28px",margin:"0 8px"},s=function({error:e}){let t=e?.digest;return(0,n.jsxs)("html",{id:"__next_error__",children:[(0,n.jsx)("head",{}),(0,n.jsxs)("body",{children:[(0,n.jsx)(o.HandleISRError,{error:e}),(0,n.jsx)("div",{style:i,children:(0,n.jsxs)("div",{children:[(0,n.jsxs)("h2",{style:u,children:["Application error: a ",t?"server":"client","-side exception has occurred while loading ",window.location.hostname," (see the"," ",t?"server logs":"browser console"," for more information)."]}),t?(0,n.jsx)("p",{style:u,children:`Digest: ${t}`}):null]})})]})]})};("function"==typeof r.default||"object"==typeof r.default&&null!==r.default)&&void 0===r.default.__esModule&&(Object.defineProperty(r.default,"__esModule",{value:!0}),Object.assign(r.default,r),t.exports=r.default)},35451,(e,t,r)=>{var n={229:function(e){var t,r,n,o=e.exports={};function i(){throw Error("setTimeout has not been defined")}function u(){throw Error("clearTimeout has not been defined")}try{t="function"==typeof setTimeout?setTimeout:i}catch(e){t=i}try{r="function"==typeof clearTimeout?clearTimeout:u}catch(e){r=u}function s(e){if(t===setTimeout)return setTimeout(e,0);if((t===i||!t)&&setTimeout)return t=setTimeout,setTimeout(e,0);try{return t(e,0)}catch(r){try{return t.call(null,e,0)}catch(r){return t.call(this,e,0)}}}var c=[],a=!1,l=-1;function f(){a&&n&&(a=!1,n.length?c=n.concat(c):l=-1,c.length&&p())}function p(){if(!a){var e=s(f);a=!0;for(var 
t=c.length;t;){for(n=c,c=[];++l1)for(var r=1;r{"use strict";var n,o;t.exports=(null==(n=e.g.process)?void 0:n.env)&&"object"==typeof(null==(o=e.g.process)?void 0:o.env)?e.g.process:e.r(35451)},45689,(e,t,r)=>{"use strict";var n=Symbol.for("react.transitional.element");function o(e,t,r){var o=null;if(void 0!==r&&(o=""+r),void 0!==t.key&&(o=""+t.key),"key"in t)for(var i in r={},t)"key"!==i&&(r[i]=t[i]);else r=t;return{$$typeof:n,type:e,key:o,ref:void 0!==(t=r.ref)?t:null,props:r}}r.Fragment=Symbol.for("react.fragment"),r.jsx=o,r.jsxs=o},43476,(e,t,r)=>{"use strict";t.exports=e.r(45689)},90317,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0});var n={bindSnapshot:function(){return a},createAsyncLocalStorage:function(){return c},createSnapshot:function(){return l}};for(var o in n)Object.defineProperty(r,o,{enumerable:!0,get:n[o]});let i=Object.defineProperty(Error("Invariant: AsyncLocalStorage accessed in runtime where it is not available"),"__NEXT_ERROR_CODE",{value:"E504",enumerable:!1,configurable:!0});class u{disable(){throw i}getStore(){}run(){throw i}exit(){throw i}enterWith(){throw i}static bind(e){return e}}let s="undefined"!=typeof globalThis&&globalThis.AsyncLocalStorage;function c(){return s?new s:new u}function a(e){return s?s.bind(e):u.bind(e)}function l(){return s?s.snapshot():function(e,...t){return e(...t)}}},42344,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"workAsyncStorageInstance",{enumerable:!0,get:function(){return n}});let n=(0,e.r(90317).createAsyncLocalStorage)()},63599,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"workAsyncStorage",{enumerable:!0,get:function(){return n.workAsyncStorageInstance}});let n=e.r(42344)},12354,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"HandleISRError",{enumerable:!0,get:function(){return o}});let n="undefined"==typeof 
window?e.r(63599).workAsyncStorage:void 0;function o({error:e}){if(n){let t=n.getStore();if(t?.isStaticGeneration)throw e&&console.error(e),e}return null}("function"==typeof r.default||"object"==typeof r.default&&null!==r.default)&&void 0===r.default.__esModule&&(Object.defineProperty(r.default,"__esModule",{value:!0}),Object.assign(r.default,r),t.exports=r.default)},50740,(e,t,r)=>{"use strict";var n=e.i(47167),o=Symbol.for("react.transitional.element"),i=Symbol.for("react.portal"),u=Symbol.for("react.fragment"),s=Symbol.for("react.strict_mode"),c=Symbol.for("react.profiler"),a=Symbol.for("react.consumer"),l=Symbol.for("react.context"),f=Symbol.for("react.forward_ref"),p=Symbol.for("react.suspense"),d=Symbol.for("react.memo"),y=Symbol.for("react.lazy"),h=Symbol.for("react.activity"),g=Symbol.for("react.view_transition"),v=Symbol.iterator,_={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},m=Object.assign,b={};function S(e,t,r){this.props=e,this.context=t,this.refs=b,this.updater=r||_}function O(){}function E(e,t,r){this.props=e,this.context=t,this.refs=b,this.updater=r||_}S.prototype.isReactComponent={},S.prototype.setState=function(e,t){if("object"!=typeof e&&"function"!=typeof e&&null!=e)throw Error("takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,e,t,"setState")},S.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")},O.prototype=S.prototype;var T=E.prototype=new O;T.constructor=E,m(T,S.prototype),T.isPureReactComponent=!0;var w=Array.isArray;function j(){}var R={H:null,A:null,T:null,S:null},x=Object.prototype.hasOwnProperty;function A(e,t,r){var n=r.ref;return{$$typeof:o,type:e,key:t,ref:void 0!==n?n:null,props:r}}function P(e){return"object"==typeof e&&null!==e&&e.$$typeof===o}var C=/\/+/g;function H(e,t){var r,n;return"object"==typeof 
e&&null!==e&&null!=e.key?(r=""+e.key,n={"=":"=0",":":"=2"},"$"+r.replace(/[=:]/g,function(e){return n[e]})):t.toString(36)}function k(e,t,r){if(null==e)return e;var n=[],u=0;return!function e(t,r,n,u,s){var c,a,l,f=typeof t;("undefined"===f||"boolean"===f)&&(t=null);var p=!1;if(null===t)p=!0;else switch(f){case"bigint":case"string":case"number":p=!0;break;case"object":switch(t.$$typeof){case o:case i:p=!0;break;case y:return e((p=t._init)(t._payload),r,n,u,s)}}if(p)return s=s(t),p=""===u?"."+H(t,0):u,w(s)?(n="",null!=p&&(n=p.replace(C,"$&/")+"/"),e(s,r,n,"",function(e){return e})):null!=s&&(P(s)&&(c=s,a=n+(null==s.key||t&&t.key===s.key?"":(""+s.key).replace(C,"$&/")+"/")+p,s=A(c.type,a,c.props)),r.push(s)),1;p=0;var d=""===u?".":u+":";if(w(t))for(var h=0;h{"use strict";t.exports=e.r(50740)},18800,(e,t,r)=>{"use strict";var n=e.r(71645);function o(e){var t="https://react.dev/errors/"+e;if(1{"use strict";!function e(){if("undefined"!=typeof __REACT_DEVTOOLS_GLOBAL_HOOK__&&"function"==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE)try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(e)}catch(e){console.error(e)}}(),t.exports=e.r(18800)}]);
\ No newline at end of file
diff --git a/src/hyperview/server/static/_next/static/chunks/640b68f22e2796e6.js b/src/hyperview/server/static/_next/static/chunks/640b68f22e2796e6.js
deleted file mode 100644
index 9792225..0000000
--- a/src/hyperview/server/static/_next/static/chunks/640b68f22e2796e6.js
+++ /dev/null
@@ -1 +0,0 @@
-(globalThis.TURBOPACK||(globalThis.TURBOPACK=[])).push(["object"==typeof document?document.currentScript:void 0,93347,c=>{c.v(t=>Promise.all(["static/chunks/6dd1fc7c515721e7.js"].map(t=>c.l(t))).then(()=>t(42030)))}]);
\ No newline at end of file
diff --git a/src/hyperview/server/static/_next/static/chunks/6dd1fc7c515721e7.js b/src/hyperview/server/static/_next/static/chunks/6dd1fc7c515721e7.js
deleted file mode 100644
index a7a8e12..0000000
--- a/src/hyperview/server/static/_next/static/chunks/6dd1fc7c515721e7.js
+++ /dev/null
@@ -1,448 +0,0 @@
-(globalThis.TURBOPACK||(globalThis.TURBOPACK=[])).push(["object"==typeof document?document.currentScript:void 0,25443,(e,t,r)=>{e.e,t.exports=function(){"use strict";var e=function(e){return e instanceof Uint8Array||e instanceof Uint16Array||e instanceof Uint32Array||e instanceof Int8Array||e instanceof Int16Array||e instanceof Int32Array||e instanceof Float32Array||e instanceof Float64Array||e instanceof Uint8ClampedArray},t=function(e,t){for(var r=Object.keys(t),n=0;nt.indexOf(e)&&r("invalid value"+i(n)+". must be one of: "+t)}var s=["gl","canvas","container","attributes","pixelRatio","extensions","optionalExtensions","profile","onDone"];function l(e,t){for(e+="";e.lengths.indexOf(e)&&r('invalid regl constructor argument "'+e+'". must be one of '+s)})},type:function(e,t,n){a(e,t)||r("invalid parameter type"+i(n)+". expected "+t+", got "+typeof e)},commandType:v,isTypedArray:function(t,n){e(t)||r("invalid parameter type"+i(n)+". must be a typed array")},nni:function(e,t){e>=0&&(0|e)===e||r("invalid parameter type, ("+e+")"+i(t)+". 
must be a nonnegative integer")},oneOf:o,shaderError:function(e,t,r,i,a){if(!e.getShaderParameter(t,e.COMPILE_STATUS)){var o,s=e.getShaderInfoLog(t),f=i===e.FRAGMENT_SHADER?"fragment":"vertex";v(r,"string",f+" shader source must be a string",a);var u=m(r,a);(o=[],s.split("\n").forEach(function(e){if(!(e.length<5)){var t=/^ERROR:\s+(\d+):(\d+):\s*(.*)$/.exec(e);t?o.push(new c(0|t[1],0|t[2],t[3].trim())):e.length>0&&o.push(new c("unknown",0,e))}}),o).forEach(function(e){var t=u[e.file];if(t){var r=t.index[e.line];if(r){r.errors.push(e),t.hasErrors=!0;return}}u.unknown.hasErrors=!0,u.unknown.lines[0].errors.push(e)}),Object.keys(u).forEach(function(e){var t=u[e];if(t.hasErrors){var r=[""],n=[""];i("file number "+e+": "+t.name+"\n","color:red;text-decoration:underline;font-weight:bold"),t.lines.forEach(function(e){if(e.errors.length>0){i(l(e.number,4)+"| ","background-color:yellow; font-weight:bold"),i(e.line+"\n","color:red; background-color:yellow; font-weight:bold");var t=0;e.errors.forEach(function(r){var n=r.message,a=/^\s*'(.*)'\s*:\s*(.*)$/.exec(n);if(a){var o=a[1];n=a[2],"assign"===o&&(o="="),t=Math.max(e.line.indexOf(o,t),0)}else t=0;i(l("| ",6)),i(l("^^^",t+3)+"\n","font-weight:bold"),i(l("| ",6)),i(n+"\n","font-weight:bold")}),i(l("| ",6)+"\n")}else i(l(e.number,4)+"| "),i(e.line+"\n","color:red")}),"undefined"==typeof document||window.chrome?console.log(r.join("")):(n[0]=r.join("%c"),console.log.apply(console,n))}function i(e,t){r.push(e),n.push(t||"")}}),n.raise("Error compiling "+f+" shader, "+u[0].name)}},linkError:function(e,t,r,i,a){if(!e.getProgramParameter(t,e.LINK_STATUS)){var o=e.getProgramInfoLog(t),s=m(r,a),l='Error linking program with vertex shader, "'+m(i,a)[0].name+'", and fragment shader "'+s[0].name+'"';"undefined"!=typeof document?console.log("%c"+l+"\n%c"+o,"color:red;text-decoration:underline;font-weight:bold","color:red"):console.log(l+"\n"+o),n.raise(l)}},callSite:p,saveCommandRef:h,saveDrawInfo:function(e,t,r,n){function 
i(e,t){Object.keys(t).forEach(function(t){e[n.id(t)]=!0})}h(e),e._fragId=(a=e.static.frag)?n.id(a):0,e._vertId=(o=e.static.vert)?n.id(o):0;var a,o,s=e._uniformSet={};i(s,t.static),i(s,t.dynamic);var l=e._attributeSet={};i(l,r.static),i(l,r.dynamic),e._hasCount="count"in e.static||"count"in e.dynamic||"elements"in e.static||"elements"in e.dynamic},framebufferFormat:function(e,t,r){e.texture?o(e.texture._texture.internalformat,t,"unsupported texture format for attachment"):o(e.renderbuffer._renderbuffer.format,r,"unsupported renderbuffer format for attachment")},guessCommand:d,texture2D:function(e,t,r){var i,a=t.width,o=t.height,s=t.channels;n(a>0&&a<=r.maxTextureSize&&o>0&&o<=r.maxTextureSize,"invalid texture shape"),(33071!==e.wrapS||33071!==e.wrapT)&&n(x(a)&&x(o),"incompatible wrap mode for texture, both width and height must be power of 2"),1===t.mipmask?1!==a&&1!==o&&n(9984!==e.minFilter&&9986!==e.minFilter&&9985!==e.minFilter&&9987!==e.minFilter,"min filter requires mipmap"):(n(x(a)&&x(o),"texture must be a square power of 2 to support mipmapping"),n(t.mipmask===(a<<1)-1,"missing or incomplete mipmap data")),5126===t.type&&(0>r.extensions.indexOf("oes_texture_float_linear")&&n(9728===e.minFilter&&9728===e.magFilter,"filter not supported, must enable oes_texture_float_linear"),n(!e.genMipmaps,"mipmap generation not supported with float textures"));var l=t.images;for(i=0;i<16;++i)if(l[i]){var f=a>>i,u=o>>i;n(t.mipmask&1<0&&a<=i.maxTextureSize&&o>0&&o<=i.maxTextureSize,"invalid texture shape"),n(a===o,"cube map must be square"),n(33071===t.wrapS&&33071===t.wrapT,"wrap mode not supported by cube map");for(var l=0;l>c,m=o>>c;n(f.mipmask&1<1&&r===n&&('"'===r||"'"===r))return['"'+E(t.substr(1,t.length-2))+'"'];var i=/\[(false|true|null|\d+|'[^']*'|"[^"]*")\]/.exec(t);if(i)return e(t.substr(0,i.index)).concat(e(i[1])).concat(e(t.substr(i.index+i[0].length)));var a=t.split(".");if(1===a.length)return['"'+E(t)+'"'];for(var 
o=[],s=0;s65535)<<4,e>>>=t,r=(e>255)<<3,e>>>=r,t|=r,r=(e>15)<<2,e>>>=r,t|=r,r=(e>3)<<1,e>>>=r,(t|=r)|e>>1}function j(){var e=M(8,function(){return[]});function t(t){var r=function(e){for(var t=16;t<=0x10000000;t*=16)if(e<=t)return t;return 0}(t),n=e[I(r)>>2];return n.length>0?n.pop():new ArrayBuffer(r)}function r(t){e[I(t.byteLength)>>2].push(t)}return{alloc:t,free:r,allocType:function(e,r){var n=null;switch(e){case 5120:n=new Int8Array(t(r),0,r);break;case 5121:n=new Uint8Array(t(r),0,r);break;case 5122:n=new Int16Array(t(2*r),0,r);break;case 5123:n=new Uint16Array(t(2*r),0,r);break;case 5124:n=new Int32Array(t(4*r),0,r);break;case 5125:n=new Uint32Array(t(4*r),0,r);break;case 5126:n=new Float32Array(t(4*r),0,r);break;default:return null}return n.length!==r?n.subarray(0,r):n},freeType:function(e){r(e.buffer)}}}var z=j();z.zero=j();var B=function(e,t){var r=1;t.ext_texture_filter_anisotropic&&(r=e.getParameter(34047));var n=1,i=1;t.webgl_draw_buffers&&(n=e.getParameter(34852),i=e.getParameter(36063));var a=!!t.oes_texture_float;if(a){var o=e.createTexture();e.bindTexture(3553,o),e.texImage2D(3553,0,6408,1,1,0,6408,5126,null);var s=e.createFramebuffer();if(e.bindFramebuffer(36160,s),e.framebufferTexture2D(36160,36064,3553,o,0),e.bindTexture(3553,null),36053!==e.checkFramebufferStatus(36160))a=!1;else{e.viewport(0,0,1,1),e.clearColor(1,0,0,1),e.clear(16384);var l=z.allocType(5126,4);e.readPixels(0,0,1,1,6408,5126,l),e.getError()?a=!1:(e.deleteFramebuffer(s),e.deleteTexture(o),a=1===l[0]),z.freeType(l)}}var f="undefined"!=typeof navigator&&(/MSIE/.test(navigator.userAgent)||/Trident\//.test(navigator.appVersion)||/Edge/.test(navigator.userAgent)),u=!0;if(!f){var 
c=e.createTexture(),d=z.allocType(5121,36);e.activeTexture(33984),e.bindTexture(34067,c),e.texImage2D(34069,0,6408,3,3,0,6408,5121,d),z.freeType(d),e.bindTexture(34067,null),e.deleteTexture(c),u=!e.getError()}return{colorBits:[e.getParameter(3410),e.getParameter(3411),e.getParameter(3412),e.getParameter(3413)],depthBits:e.getParameter(3414),stencilBits:e.getParameter(3415),subpixelBits:e.getParameter(3408),extensions:Object.keys(t).filter(function(e){return!!t[e]}),maxAnisotropic:r,maxDrawbuffers:n,maxColorAttachments:i,pointSizeDims:e.getParameter(33901),lineWidthDims:e.getParameter(33902),maxViewportDims:e.getParameter(3386),maxCombinedTextureUnits:e.getParameter(35661),maxCubeMapSize:e.getParameter(34076),maxRenderbufferSize:e.getParameter(34024),maxTextureUnits:e.getParameter(34930),maxTextureSize:e.getParameter(3379),maxAttributes:e.getParameter(34921),maxVertexUniforms:e.getParameter(36347),maxVertexTextureUnits:e.getParameter(35660),maxVaryingVectors:e.getParameter(36348),maxFragmentUniforms:e.getParameter(36349),glsl:e.getParameter(35724),renderer:e.getParameter(7937),vendor:e.getParameter(7936),version:e.getParameter(7938),readFloat:a,npotTextureCube:u}};function R(t){return!!t&&"object"==typeof t&&Array.isArray(t.shape)&&Array.isArray(t.stride)&&"number"==typeof t.offset&&t.shape.length===t.stride.length&&(Array.isArray(t.data)||e(t.data))}var L=function(e){return Object.keys(e).map(function(t){return e[t]})},F=function(e){for(var t=[],r=e;r.length;r=r[0])t.push(r.length);return t},V=function(e,t,r,n){var i=1;if(t.length)for(var a=0;a>>31<<15,a=(n<<1>>>24)-127,o=n>>13&1023;if(a<-24)t[r]=i;else if(a<-14){var s=-14-a;t[r]=i+(o+1024>>s)}else a>15?t[r]=i+31744:t[r]=i+(a+15<<10)+o}return t}function J(t){return Array.isArray(t)||e(t)}var ee=function(e){return!(e&e-1)&&!!e},et=[9984,9986,9985,9987],er=[0,6409,6410,6407,6408],en={};function ei(e){return"[object "+e+"]"}en[6409]=en[6406]=en[6402]=1,en[34041]=en[6410]=2,en[6407]=en[35904]=3,en[6408]=en[35906]=4;var 
ea=ei("HTMLCanvasElement"),eo=ei("OffscreenCanvas"),es=ei("CanvasRenderingContext2D"),el=ei("ImageBitmap"),ef=ei("HTMLImageElement"),eu=ei("HTMLVideoElement"),ec=Object.keys(N).concat([ea,eo,es,el,ef,eu]),ed=[];ed[5121]=1,ed[5126]=4,ed[36193]=2,ed[5123]=2,ed[5125]=4;var ep=[];function em(e){return Array.isArray(e)&&(0===e.length||"number"==typeof e[0])}function eh(e){return!!Array.isArray(e)&&0!==e.length&&!!J(e[0])}function ey(e){return Object.prototype.toString.call(e)}function ev(e){if(!e)return!1;var t=ey(e);return ec.indexOf(t)>=0||em(e)||eh(e)||R(e)}function eg(e){return 0|N[Object.prototype.toString.call(e)]}function eb(e,t){return z.allocType(36193===e.type?5126:e.type,t)}function ex(e,t){36193===e.type?(e.data=X(t),z.freeType(t)):e.data=t}function ew(e,t,r,n,i,a){var o;if(o=void 0!==ep[e]?ep[e]:en[e]*ed[t],a&&(o*=6),!i)return o*r*n;for(var s=0,l=r;l>=1;)s+=o*l*l,l/=2;return s}ep[32854]=2,ep[32855]=2,ep[36194]=2,ep[34041]=4,ep[33776]=.5,ep[33777]=.5,ep[33778]=1,ep[33779]=1,ep[35986]=.5,ep[35987]=1,ep[34798]=1,ep[35840]=.5,ep[35841]=.25,ep[35842]=.5,ep[35843]=.25,ep[36196]=.5;var eA=[];eA[32854]=2,eA[32855]=2,eA[36194]=2,eA[33189]=2,eA[36168]=1,eA[34041]=4,eA[35907]=4,eA[34836]=16,eA[34842]=8,eA[34843]=6;var eS=function(e,t,r,n,i){var a={rgba4:32854,rgb565:36194,"rgb5 a1":32855,depth:33189,stencil:36168,"depth stencil":34041};t.ext_srgb&&(a.srgba=35907),t.ext_color_buffer_half_float&&(a.rgba16f=34842,a.rgb16f=34843),t.webgl_color_buffer_float&&(a.rgba32f=34836);var o=[];Object.keys(a).forEach(function(e){o[a[e]]=e});var s=0,l={};function f(e){this.id=s++,this.refCount=1,this.renderbuffer=e,this.format=32854,this.width=0,this.height=0,i.profile&&(this.stats={size:0})}function u(t){var r=t.renderbuffer;w(r,"must not double destroy renderbuffer"),e.bindRenderbuffer(36161,null),e.deleteRenderbuffer(r),t.renderbuffer=null,t.refCount=0,delete l[t.id],n.renderbufferCount--}return 
f.prototype.decRef=function(){--this.refCount<=0&&u(this)},i.profile&&(n.getTotalRenderbufferSize=function(){var e=0;return Object.keys(l).forEach(function(t){e+=l[t].stats.size}),e}),{create:function(t,s){var u=new f(e.createRenderbuffer());function c(t,n){var s,l,f,d=0,p=0,m=32854;if("object"==typeof t&&t){if("shape"in t){var h=t.shape;w(Array.isArray(h)&&h.length>=2,"invalid renderbuffer shape"),d=0|h[0],p=0|h[1]}else"radius"in t&&(d=p=0|t.radius),"width"in t&&(d=0|t.width),"height"in t&&(p=0|t.height);"format"in t&&(w.parameter(t.format,a,"invalid renderbuffer format"),m=a[t.format])}else"number"==typeof t?(d=0|t,p="number"==typeof n?0|n:d):t?w.raise("invalid arguments to renderbuffer constructor"):d=p=1;if(w(d>0&&p>0&&d<=r.maxRenderbufferSize&&p<=r.maxRenderbufferSize,"invalid renderbuffer size"),d!==u.width||p!==u.height||m!==u.format)return c.width=u.width=d,c.height=u.height=p,u.format=m,e.bindRenderbuffer(36161,u.renderbuffer),e.renderbufferStorage(36161,m,d,p),w(0===e.getError(),"invalid render buffer format"),i.profile&&(u.stats.size=(s=u.format,l=u.width,f=u.height,eA[s]*l*f)),c.format=o[u.format],c}return l[u.id]=u,n.renderbufferCount++,c(t,s),c.resize=function(t,n){var a,o,s,l=0|t,f=0|n||l;return l===u.width&&f===u.height?c:(w(l>0&&f>0&&l<=r.maxRenderbufferSize&&f<=r.maxRenderbufferSize,"invalid renderbuffer size"),c.width=u.width=l,c.height=u.height=f,e.bindRenderbuffer(36161,u.renderbuffer),e.renderbufferStorage(36161,u.format,l,f),w(0===e.getError(),"invalid render buffer 
format"),i.profile&&(u.stats.size=(a=u.format,o=u.width,s=u.height,eA[a]*o*s)),c)},c._reglType="renderbuffer",c._renderbuffer=u,i.profile&&(c.stats=u.stats),c.destroy=function(){u.decRef()},c},clear:function(){L(l).forEach(u)},restore:function(){L(l).forEach(function(t){t.renderbuffer=e.createRenderbuffer(),e.bindRenderbuffer(36161,t.renderbuffer),e.renderbufferStorage(36161,t.format,t.width,t.height)}),e.bindRenderbuffer(36161,null)}}},eE=[6407,6408],eT=[];eT[6408]=4,eT[6407]=3;var ek=[];ek[5121]=1,ek[5126]=4,ek[36193]=2;var e_=[32854,32855,36194,35907,34842,34843,34836],eO={};eO[36053]="complete",eO[36054]="incomplete attachment",eO[36057]="incomplete dimensions",eO[36055]="incomplete, missing attachment",eO[36061]="unsupported";var eC=["attributes","elements","offset","count","primitive","instances"];function eD(){this.state=0,this.x=0,this.y=0,this.z=0,this.w=0,this.buffer=null,this.size=0,this.normalized=!1,this.type=5126,this.offset=0,this.stride=0,this.divisor=0}function eP(e){return Array.prototype.slice.call(e)}function eM(e){return eP(e).join("")}var eI="xyzw".split(""),ej="dither",ez="blend.enable",eB="blend.color",eR="blend.equation",eL="blend.func",eF="depth.enable",eV="depth.func",e$="depth.range",eN="depth.mask",eW="colorMask",eU="cull.enable",eZ="cull.face",eG="frontFace",eq="lineWidth",eH="polygonOffset.enable",eK="polygonOffset.offset",eY="sample.alpha",eQ="sample.enable",eX="sample.coverage",eJ="stencil.enable",e0="stencil.mask",e1="stencil.func",e2="stencil.opFront",e3="stencil.opBack",e5="scissor.enable",e6="scissor.box",e4="viewport",e8="profile",e7="framebuffer",e9="vert",te="frag",tt="elements",tr="primitive",tn="count",ti="offset",ta="instances",to="Width",ts="Height",tl=e7+to,tf=e7+ts,tu=e4+to,tc=e4+ts,td="drawingBuffer",tp=td+to,tm=td+ts,th=[eL,eR,e1,e2,e3,eX,e4,e6,eK],ty={0:0,1:1,zero:0,one:1,"src color":768,"one minus src color":769,"src alpha":770,"one minus src alpha":771,"dst color":774,"one minus dst color":775,"dst alpha":772,"one 
minus dst alpha":773,"constant color":32769,"one minus constant color":32770,"constant alpha":32771,"one minus constant alpha":32772,"src alpha saturate":776},tv=["constant color, constant alpha","one minus constant color, constant alpha","constant color, one minus constant alpha","one minus constant color, one minus constant alpha","constant alpha, constant color","constant alpha, one minus constant color","one minus constant alpha, constant color","one minus constant alpha, one minus constant color"],tg={never:512,less:513,"<":513,equal:514,"=":514,"==":514,"===":514,lequal:515,"<=":515,greater:516,">":516,notequal:517,"!=":517,"!==":517,gequal:518,">=":518,always:519},tb={0:0,zero:0,keep:7680,replace:7681,increment:7682,decrement:7683,"increment wrap":34055,"decrement wrap":34056,invert:5386},tx={frag:35632,vert:35633},tw={cw:2304,ccw:2305};function tA(t){return Array.isArray(t)||e(t)||R(t)}function tS(e){return e.sort(function(e,t){return e===e4?-1:t===e4?1:e=1,n>=2,t)}if(4===r){var i=e.data;return new tE(i.thisDep,i.contextDep,i.propDep,t)}if(5===r)return new tE(!1,!1,!1,t);if(6!==r)return new tE(3===r,2===r,1===r,t);for(var a=!1,o=!1,s=!1,l=0;l=1&&(o=!0),u>=2&&(s=!0)}else 4===f.type&&(a=a||f.data.thisDep,o=o||f.data.contextDep,s=s||f.data.propDep)}return new tE(a,o,s,t)}var tO=new tE(!1,!1,!1,function(){}),tC=function(e,t){if(!t.ext_disjoint_timer_query)return null;var r=[],n=[];function i(){this.startQueryIndex=-1,this.endQueryIndex=-1,this.sum=0,this.stats=null}var a=[],o=[];function s(e,t,r){var n=a.pop()||new i;n.startQueryIndex=e,n.endQueryIndex=t,n.sum=0,n.stats=r,o.push(n)}var l=[],f=[];return{beginQuery:function(e){var i=r.pop()||t.ext_disjoint_timer_query.createQueryEXT();t.ext_disjoint_timer_query.beginQueryEXT(35007,i),n.push(i),s(n.length-1,n.length,e)},endQuery:function(){t.ext_disjoint_timer_query.endQueryEXT(35007)},pushScopeStats:s,update:function(){var 
e,i,s=n.length;if(0!==s){f.length=Math.max(f.length,s+1),l.length=Math.max(l.length,s+1),l[0]=0,f[0]=0;var u=0;for(i=0,e=0;i0,"invalid pixel ratio")):w.raise("invalid arguments to regl"),r&&("canvas"===r.nodeName.toLowerCase()?i=r:n=r),!a){if(!i){w("undefined"!=typeof document,"must manually specify webgl context outside of DOM environments");var m=function(e,r,n){var i,a=document.createElement("canvas");function o(){var t=window.innerWidth,r=window.innerHeight;if(e!==document.body){var i=a.getBoundingClientRect();t=i.right-i.left,r=i.bottom-i.top}a.width=n*t,a.height=n*r}return t(a.style,{border:0,margin:0,padding:0,top:0,left:0,width:"100%",height:"100%"}),e.appendChild(a),e===document.body&&(a.style.position="absolute",t(e.style,{margin:0,padding:0})),e!==document.body&&"function"==typeof ResizeObserver?(i=new ResizeObserver(function(){setTimeout(o)})).observe(e):window.addEventListener("resize",o,!1),o(),{canvas:a,onDestroy:function(){i?i.disconnect():window.removeEventListener("resize",o),e.removeChild(a)}}}(n||document.body,0,u);if(!m)return null;i=m.canvas,p=m.onDestroy}void 0===s.premultipliedAlpha&&(s.premultipliedAlpha=!0),a=function(e,t){function r(r){try{return e.getContext(r,t)}catch(e){return null}}return r("webgl")||r("experimental-webgl")||r("webgl-experimental")}(i,s)}return a?{gl:a,canvas:i,container:n,extensions:l,optionalExtensions:f,pixelRatio:u,profile:c,onDone:d,onDestroy:p}:(p(),d("webgl not supported, try upgrading your browser or graphics drivers http://get.webgl.org"),null)}(r);if(!a)return null;var o=a.gl,s=o.getContextAttributes(),l=o.isContextLost(),f=function(e,t){var r={};function n(t){w.type(t,"string","extension name must be string");var n,i=t.toLowerCase();try{n=r[i]=e.getExtension(i)}catch(e){}return!!n}for(var i=0;i0)if(Array.isArray(r[0])){for(var s,l=F(r),u=1,c=1;c0)if("number"==typeof t[0]){var a=z.allocType(d.dtype,t.length);q(a,t),m(a,i),z.freeType(a)}else if(Array.isArray(t[0])||e(t[0])){n=F(t);var 
o=V(t,n,d.dtype);m(o,i),z.freeType(o)}else w.raise("invalid buffer data")}else if(R(t)){n=t.shape;var s=t.stride,l=0,f=0,u=0,c=0;1===n.length?(l=n[0],f=1,u=s[0],c=0):2===n.length?(l=n[0],f=n[1],u=s[0],c=s[1]):w.raise("invalid shape");var h=Array.isArray(t.data)?d.dtype:G(t.data),y=z.allocType(h,l*f);H(y,t.data,l,f,u,c,t.offset),m(y,i),z.freeType(y)}else w.raise("invalid data for buffer subdata");return p},n.profile&&(p.stats=d.stats),p.destroy=function(){c(d)},p},createStream:function(e,t){var r=l.pop();return r||(r=new s(e)),r.bind(),u(r,t,35040,0,1,!1),r},destroyStream:function(e){l.push(e)},clear:function(){L(o).forEach(c),l.forEach(c)},getBuffer:function(e){return e&&e._buffer instanceof s?e._buffer:null},restore:function(){L(o).forEach(function(e){e.buffer=t.createBuffer(),t.bindBuffer(e.type,e.buffer),t.bufferData(e.type,e.persistentData||e.byteLength,e.usage)})},_initBuffer:u}}(o,c,a,function(e){return E.destroyBuffer(e)}),A=function(t,r,n,i){var a={},o=0,s={uint8:5121,uint16:5123};function l(e){this.id=o++,a[this.id]=this,this.buffer=e,this.primType=4,this.vertCount=0,this.type=0}r.oes_element_index_uint&&(s.uint32=5125),l.prototype.bind=function(){this.buffer.bind()};var f=[];function u(i,a,o,s,l,f,u){if(i.buffer.bind(),a){var c,d=u;!u&&(!e(a)||R(a)&&!e(a.data))&&(d=r.oes_element_index_uint?5125:5123),n._initBuffer(i.buffer,a,o,d,3)}else t.bufferData(34963,f,o),i.buffer.dtype=c||5121,i.buffer.usage=o,i.buffer.dimension=3,i.buffer.byteLength=f;if(c=u,!u){switch(i.buffer.dtype){case 5121:case 5120:c=5121;break;case 5123:case 5122:c=5123;break;case 5125:case 5124:c=5125;break;default:w.raise("unsupported type for element array")}i.buffer.dtype=c}i.type=c,w(5125!==c||!!r.oes_element_index_uint,"32 bit element buffers not supported, enable oes_element_index_uint first");var p=l;p<0&&(p=i.buffer.byteLength,5123===c?p>>=1:5125===c&&(p>>=2)),i.vertCount=p;var m=s;if(s<0){m=4;var h=i.buffer.dimension;1===h&&(m=0),2===h&&(m=1),3===h&&(m=4)}i.primType=m}function 
c(e){i.elementsCount--,w(null!==e.buffer,"must not double destroy elements"),delete a[e.id],e.buffer.destroy(),e.buffer=null}return{create:function(t,r){var a=n.create(null,34963,!0),o=new l(a._buffer);function f(t){if(t)if("number"==typeof t)a(t),o.primType=4,o.vertCount=0|t,o.type=5121;else{var r=null,n=35044,i=-1,l=-1,c=0,d=0;Array.isArray(t)||e(t)||R(t)?r=t:(w.type(t,"object","invalid arguments for elements"),"data"in t&&w(Array.isArray(r=t.data)||e(r)||R(r),"invalid data for element buffer"),"usage"in t&&(w.parameter(t.usage,U,"invalid element buffer usage"),n=U[t.usage]),"primitive"in t&&(w.parameter(t.primitive,K,"invalid element buffer primitive"),i=K[t.primitive]),"count"in t&&(w("number"==typeof t.count&&t.count>=0,"invalid vertex count for elements"),l=0|t.count),"type"in t&&(w.parameter(t.type,s,"invalid buffer type"),d=s[t.type]),"length"in t?c=0|t.length:(c=l,5123===d||5122===d?c*=2:(5125===d||5124===d)&&(c*=4))),u(o,r,n,i,l,c,d)}else a(),o.primType=4,o.vertCount=0,o.type=5121;return f}return i.elementsCount++,f(t),f._reglType="elements",f._elements=o,f.subdata=function(e,t){return a.subdata(e,t),f},f.destroy=function(){c(o)},f},createStream:function(e){var t=f.pop();return t||(t=new l(n.create(null,34963,!0,!1)._buffer)),u(t,e,35040,-1,-1,0,0),t},destroyStream:function(e){f.push(e)},getElements:function(e){return"function"==typeof e&&e._elements instanceof l?e._elements:null},clear:function(){L(a).forEach(c)}}}(o,d,x,c),E=function(t,r,n,i,a,o,s){for(var l=n.maxAttributes,f=Array(l),u=0;u{for(var e=Object.keys(t),r=0;r=0,'invalid option for vao: "'+e[r]+'" valid options are '+eC)}),w(Array.isArray(d),"attributes must be an array")}w(d.length0,"must specify at least one attribute");var f={},u=n.attributes;u.length=d.length;for(var 
c=0;c=y.byteLength?p.subdata(y):(p.destroy(),n.buffers[c]=null)),n.buffers[c]||(p=n.buffers[c]=a.create(m,34962,!1,!0)),h.buffer=a.getBuffer(p),h.size=0|h.buffer.dimension,h.normalized=!1,h.type=h.buffer.dtype,h.offset=0,h.stride=0,h.divisor=0,h.state=1,f[c]=1):a.getBuffer(m)?(h.buffer=a.getBuffer(m),h.size=0|h.buffer.dimension,h.normalized=!1,h.type=h.buffer.dtype,h.offset=0,h.stride=0,h.divisor=0,h.state=1):a.getBuffer(m.buffer)?(h.buffer=a.getBuffer(m.buffer),h.size=0|(+m.size||h.buffer.dimension),h.normalized=!!m.normalized,"type"in m?(w.parameter(m.type,W,"invalid buffer type"),h.type=W[m.type]):h.type=h.buffer.dtype,h.offset=0|(m.offset||0),h.stride=0|(m.stride||0),h.divisor=0|(m.divisor||0),h.state=1,w(h.size>=1&&h.size<=4,"size must be between 1 and 4"),w(h.offset>=0,"invalid offset"),w(h.stride>=0&&h.stride<=255,"stride must be between 0 and 255"),w(h.divisor>=0,"divisor must be positive"),w(!h.divisor||!!r.angle_instanced_arrays,"ANGLE_instanced_arrays must be enabled to use divisor")):"x"in m?(w(c>0,"first attribute must not be a constant"),h.x=+m.x||0,h.y=+m.y||0,h.z=+m.z||0,h.w=+m.w||0,h.state=2):w(!1,"invalid attribute spec for location "+c)}for(var v=0;v1)for(var v=0;ve&&(e=t.stats.uniformsCount)}),e},n.getMaxAttributesCount=function(){var e=0;return c.forEach(function(t){t.stats.attributesCount>e&&(e=t.stats.attributesCount)}),e}),{clear:function(){var t=e.deleteShader.bind(e);L(a).forEach(t),a={},L(o).forEach(t),o={},c.forEach(function(t){e.deleteProgram(t.program)}),c.length=0,u={},n.shaderCount=0},program:function(r,i,s,l){w.command(r>=0,"missing vertex shader",s),w.command(i>=0,"missing fragment shader",s);var f=u[i];f||(f=u[i]={});var d=f[r];if(d&&(d.refCount++,!l))return d;var h=new p(i,r);return n.shaderCount++,m(h,s,l),d||(f[r]=h),c.push(h),t(h,{destroy:function(){if(h.refCount--,h.refCount<=0){e.deleteProgram(h.program);var t=c.indexOf(h);c.splice(t,1),n.shaderCount--}f[h.vertId].refCount<=0&&(e.deleteShader(o[h.vertId]),delete 
o[h.vertId],delete u[h.fragId][h.vertId]),Object.keys(u[h.fragId]).length||(e.deleteShader(a[h.fragId]),delete a[h.fragId],delete u[h.fragId])}})},restore:function(){a={},o={};for(var e=0;e=0&&(h[e]=t)});var g=Object.keys(h);i.textureFormats=g;var b=[];Object.keys(h).forEach(function(e){b[h[e]]=e});var x=[];Object.keys(m).forEach(function(e){x[m[e]]=e});var A=[];Object.keys(c).forEach(function(e){A[c[e]]=e});var S=[];Object.keys(d).forEach(function(e){S[d[e]]=e});var E=[];Object.keys(u).forEach(function(e){E[u[e]]=e});var T=g.reduce(function(e,t){var r=h[t];return 6409===r||6406===r||6409===r||6410===r||6402===r||34041===r||n.ext_srgb&&(35904===r||35906===r)?e[r]=r:32855===r||t.indexOf("rgba")>=0?e[r]=6408:e[r]=6407,e},{});function k(){this.internalformat=6408,this.format=6408,this.type=5121,this.compressed=!1,this.premultiplyAlpha=!1,this.flipY=!1,this.unpackAlignment=1,this.colorSpace=37444,this.width=0,this.height=0,this.channels=0}function _(e,t){e.internalformat=t.internalformat,e.format=t.format,e.type=t.type,e.compressed=t.compressed,e.premultiplyAlpha=t.premultiplyAlpha,e.flipY=t.flipY,e.unpackAlignment=t.unpackAlignment,e.colorSpace=t.colorSpace,e.width=t.width,e.height=t.height,e.channels=t.channels}function O(e,t){if("object"==typeof t&&t){if("premultiplyAlpha"in t&&(w.type(t.premultiplyAlpha,"boolean","invalid premultiplyAlpha"),e.premultiplyAlpha=t.premultiplyAlpha),"flipY"in t&&(w.type(t.flipY,"boolean","invalid texture flip"),e.flipY=t.flipY),"alignment"in t&&(w.oneOf(t.alignment,[1,2,4,8],"invalid texture unpack alignment"),e.unpackAlignment=t.alignment),"colorSpace"in t&&(w.parameter(t.colorSpace,p,"invalid colorSpace"),e.colorSpace=p[t.colorSpace]),"type"in t){var r=t.type;w(n.oes_texture_float||"float"!==r&&"float32"!==r,"you must enable the OES_texture_float extension in order to use floating point textures."),w(n.oes_texture_half_float||"half float"!==r&&"float16"!==r,"you must enable the OES_texture_half_float extension in order to use 16-bit 
floating point textures."),w(n.webgl_depth_texture||"uint16"!==r&&"uint32"!==r&&"depth stencil"!==r,"you must enable the WEBGL_depth_texture extension in order to use depth/stencil textures."),w.parameter(r,m,"invalid texture type"),e.type=m[r]}var a=e.width,o=e.height,s=e.channels,l=!1;"shape"in t?(w(Array.isArray(t.shape)&&t.shape.length>=2,"shape must be an array"),a=t.shape[0],o=t.shape[1],3===t.shape.length&&(w((s=t.shape[2])>0&&s<=4,"invalid number of channels"),l=!0),w(a>=0&&a<=i.maxTextureSize,"invalid width"),w(o>=0&&o<=i.maxTextureSize,"invalid height")):("radius"in t&&w((a=o=t.radius)>=0&&a<=i.maxTextureSize,"invalid radius"),"width"in t&&w((a=t.width)>=0&&a<=i.maxTextureSize,"invalid width"),"height"in t&&w((o=t.height)>=0&&o<=i.maxTextureSize,"invalid height"),"channels"in t&&(w((s=t.channels)>0&&s<=4,"invalid number of channels"),l=!0)),e.width=0|a,e.height=0|o,e.channels=0|s;var f=!1;if("format"in t){var u=t.format;w(n.webgl_depth_texture||"depth"!==u&&"depth stencil"!==u,"you must enable the WEBGL_depth_texture extension in order to use depth/stencil textures."),w.parameter(u,h,"invalid texture format");var c=e.internalformat=h[u];e.format=T[c],u in m&&!("type"in t)&&(e.type=m[u]),u in y&&(e.compressed=!0),f=!0}!l&&f?e.channels=en[e.format]:l&&!f?e.channels!==er[e.format]&&(e.format=e.internalformat=er[e.channels]):f&&l&&w(e.channels===en[e.format],"number of channels inconsistent with specified format")}}function C(e){r.pixelStorei(37440,e.flipY),r.pixelStorei(37441,e.premultiplyAlpha),r.pixelStorei(37443,e.colorSpace),r.pixelStorei(3317,e.unpackAlignment)}function D(){k.call(this),this.xOffset=0,this.yOffset=0,this.data=null,this.needsFree=!1,this.element=null,this.needsCopy=!1}function P(t,r){var n=null;if(ev(r)?n=r:r&&(w.type(r,"object","invalid pixel data type"),O(t,r),"x"in r&&(t.xOffset=0|r.x),"y"in r&&(t.yOffset=0|r.y),ev(r.data)&&(n=r.data)),w(!t.compressed||n instanceof Uint8Array,"compressed texture data must be stored in a 
uint8array"),r.copy){w(!n,"can not specify copy and data field for the same texture");var a=o.viewportWidth,s=o.viewportHeight;t.width=t.width||a-t.xOffset,t.height=t.height||s-t.yOffset,t.needsCopy=!0,w(t.xOffset>=0&&t.xOffset=0&&t.yOffset0&&t.width<=a&&t.height>0&&t.height<=s,"copy texture read out of bounds")}else if(n){if(e(n))t.channels=t.channels||4,t.data=n,"type"in r||5121!==t.type||(t.type=eg(n));else if(em(n)){t.channels=t.channels||4;var l=n,f=l.length;switch(t.type){case 5121:case 5123:case 5125:case 5126:var u=z.allocType(t.type,f);u.set(l),t.data=u;break;case 36193:t.data=X(l);break;default:w.raise("unsupported texture type, must specify a typed array")}t.alignment=1,t.needsFree=!0}else if(R(n)){var c,d,p,m,h,y,v=n.data;Array.isArray(v)||5121!==t.type||(t.type=eg(v));var g=n.shape,b=n.stride;3===g.length?(p=g[2],y=b[2]):(w(2===g.length,"invalid ndarray pixel data, must be 2 or 3D"),p=1,y=1),c=g[0],d=g[1],m=b[0],h=b[1],t.alignment=1,t.width=c,t.height=d,t.channels=p,t.format=t.internalformat=er[p],t.needsFree=!0,function(e,t,r,n,i,a){for(var o=e.width,s=e.height,l=e.channels,f=eb(e,o*s*l),u=0,c=0;c=0,"oes_texture_float extension not enabled"):36193===t.type&&w(i.extensions.indexOf("oes_texture_half_float")>=0,"oes_texture_half_float extension not enabled")}function M(e,t,n,i,o){var s=e.element,l=e.data,f=e.internalformat,u=e.format,c=e.type,d=e.width,p=e.height;C(e),s?r.texSubImage2D(t,o,n,i,u,c,s):e.compressed?r.compressedTexSubImage2D(t,o,n,i,f,d,p,l):e.needsCopy?(a(),r.copyTexSubImage2D(t,o,n,i,e.xOffset,e.yOffset,d,p)):r.texSubImage2D(t,o,n,i,d,p,u,c,l)}var I=[];function j(){return I.pop()||new D}function B(e){e.needsFree&&z.freeType(e.data),D.call(e),I.push(e)}function $(){k.call(this),this.genMipmaps=!1,this.mipmapHint=4352,this.mipmask=0,this.images=Array(16)}function N(e,t,r){var n=e.images[0]=j();e.mipmask=1,n.width=e.width=t,n.height=e.height=r,n.channels=e.channels=4}function W(e,t){var 
r=null;if(ev(t))_(r=e.images[0]=j(),e),P(r,t),e.mipmask=1;else if(O(e,t),Array.isArray(t.mipmap))for(var n=t.mipmap,i=0;i>=i,r.height>>=i,P(r,n[i]),e.mipmask|=1<=0)||"faces"in t||(e.genMipmaps=!0)}if("mag"in t){var n=t.mag;w.parameter(n,c),e.magFilter=c[n]}var a=e.wrapS,o=e.wrapT;if("wrap"in t){var s=t.wrap;"string"==typeof s?(w.parameter(s,u),a=o=u[s]):Array.isArray(s)&&(w.parameter(s[0],u),w.parameter(s[1],u),a=u[s[0]],o=u[s[1]])}else{if("wrapS"in t){var l=t.wrapS;w.parameter(l,u),a=u[l]}if("wrapT"in t){var p=t.wrapT;w.parameter(p,u),o=u[p]}}if(e.wrapS=a,e.wrapT=o,"anisotropic"in t){var m=t.anisotropic;w("number"==typeof m&&m>=1&&m<=i.maxAnisotropic,"aniso samples must be between 1 and "),e.anisotropic=t.anisotropic}if("mipmap"in t){var h=!1;switch(typeof t.mipmap){case"string":w.parameter(t.mipmap,f,"invalid mipmap hint"),e.mipmapHint=f[t.mipmap],e.genMipmaps=!0,h=!0;break;case"boolean":h=e.genMipmaps=t.mipmap;break;case"object":w(Array.isArray(t.mipmap),"invalid mipmap type"),e.genMipmaps=!1,h=!0;break;default:w.raise("invalid mipmap type")}!h||"min"in t||(e.minFilter=9984)}}function Y(e,t){r.texParameteri(t,10241,e.minFilter),r.texParameteri(t,10240,e.magFilter),r.texParameteri(t,10242,e.wrapS),r.texParameteri(t,10243,e.wrapT),n.ext_texture_filter_anisotropic&&r.texParameteri(t,34046,e.anisotropic),e.genMipmaps&&(r.hint(33170,e.mipmapHint),r.generateMipmap(t))}var Q=0,ei={},ec=i.maxTextureUnits,ed=Array(ec).map(function(){return null});function ep(e){k.call(this),this.mipmask=0,this.internalformat=6408,this.id=Q++,this.refCount=1,this.target=e,this.texture=r.createTexture(),this.unit=-1,this.bindCount=0,this.texInfo=new H,l.profile&&(this.stats={size:0})}function eA(e){r.activeTexture(33984),r.bindTexture(e.target,e.texture)}function eS(){var e=ed[0];e?r.bindTexture(e.target,e.texture):r.bindTexture(3553,null)}function eE(e){var t=e.texture;w(t,"must not double destroy texture");var 
n=e.unit,i=e.target;n>=0&&(r.activeTexture(33984+n),r.bindTexture(i,null),ed[n]=null),r.deleteTexture(t),e.texture=null,e.params=null,e.pixels=null,e.refCount=0,delete ei[e.id],s.textureCount--}return t(ep.prototype,{bind:function(){this.bindCount+=1;var e=this.unit;if(e<0){for(var t=0;t0)continue;n.unit=-1}ed[t]=this,e=t;break}e>=ec&&w.raise("insufficient number of texture units"),l.profile&&s.maxTextureUnits>l)-o,f.height=f.height||(n.height>>l)-s,w(n.type===f.type&&n.format===f.format&&n.internalformat===f.internalformat,"incompatible format for texture.subimage"),w(o>=0&&s>=0&&o+f.width<=n.width&&s+f.height<=n.height,"texture.subimage write out of bounds"),w(n.mipmask&1<>s;++s){var f=i>>s,u=o>>s;if(!f||!u)break;r.texImage2D(3553,s,n.format,f,u,0,n.format,n.type,null)}return eS(),l.profile&&(n.stats.size=ew(n.internalformat,n.type,i,o,!1,!1)),a},a._reglType="texture2d",a._texture=n,l.profile&&(a.stats=n.stats),a.destroy=function(){n.decRef()},a},createCube:function(e,t,n,a,o,f){var u=new ep(34067);ei[u.id]=u,s.cubeCount++;var c=Array(6);function d(e,t,r,n,a,o){var s,f=u.texInfo;for(H.call(f),s=0;s<6;++s)c[s]=G();if("number"!=typeof e&&e)if("object"==typeof e)if(t)W(c[0],e),W(c[1],t),W(c[2],r),W(c[3],n),W(c[4],a),W(c[5],o);else if(K(f,e),O(u,e),"faces"in e){var p=e.faces;for(w(Array.isArray(p)&&6===p.length,"cube faces must be a length 6 array"),s=0;s<6;++s)w("object"==typeof p[s]&&!!p[s],"invalid input for cube map face"),_(c[s],u),W(c[s],p[s])}else for(s=0;s<6;++s)W(c[s],e);else w.raise("invalid arguments to cube map");else{var m=0|e||1;for(s=0;s<6;++s)N(c[s],m,m)}for(_(u,c[0]),w.optional(function(){i.npotTextureCube||w(ee(u.width)&&ee(u.height),"your browser does not support non power or two texture 
dimensions")}),f.genMipmaps?u.mipmask=(c[0].width<<1)-1:u.mipmask=c[0].mipmask,w.textureCube(u,f,c,i),u.internalformat=c[0].internalformat,d.width=c[0].width,d.height=c[0].height,eA(u),s=0;s<6;++s)U(c[s],34069+s);for(Y(f,34067),eS(),l.profile&&(u.stats.size=ew(u.internalformat,u.type,d.width,d.height,f.genMipmaps,!0)),d.format=b[u.internalformat],d.type=x[u.type],d.mag=A[f.magFilter],d.min=S[f.minFilter],d.wrapS=E[f.wrapS],d.wrapT=E[f.wrapT],s=0;s<6;++s)q(c[s]);return d}return d(e,t,n,a,o,f),d.subimage=function(e,t,r,n,i){w(!!t,"must specify image data"),w("number"==typeof e&&e===(0|e)&&e>=0&&e<6,"invalid face");var a=0|r,o=0|n,s=0|i,l=j();return _(l,u),l.width=0,l.height=0,P(l,t),l.width=l.width||(u.width>>s)-a,l.height=l.height||(u.height>>s)-o,w(u.type===l.type&&u.format===l.format&&u.internalformat===l.internalformat,"incompatible format for texture.subimage"),w(a>=0&&o>=0&&a+l.width<=u.width&&o+l.height<=u.height,"texture.subimage write out of bounds"),w(u.mipmask&1<>i;++i)r.texImage2D(34069+n,i,u.format,t>>i,t>>i,0,u.format,u.type,null);return eS(),l.profile&&(u.stats.size=ew(u.internalformat,u.type,d.width,d.height,!1,!0)),d}},d._reglType="textureCube",d._texture=u,l.profile&&(d.stats=u.stats),d.destroy=function(){u.decRef()},d},clear:function(){for(var e=0;e>t,e.height>>t,0,e.internalformat,e.type,null);else for(var n=0;n<6;++n)r.texImage2D(34069+n,t,e.internalformat,e.width>>t,e.height>>t,0,e.internalformat,e.type,null);Y(e.texInfo,e.target)})},refresh:function(){for(var e=0;e=34069&&t<34075,"invalid cube map target")):"renderbuffer"===a?(n=i,t=36161):w.raise("invalid regl object for attachment"),new c(t,r,n)}function y(e,t,r,n,o){if(r){var s=i.create2D({width:e,height:t,format:n,type:o});return s._texture.refCount=0,new c(3553,s,null)}var l=a.create({width:e,height:t,format:n});return l._renderbuffer.refCount=0,new c(36161,null,l)}function v(e){return e&&(e.texture||e.renderbuffer)}function 
g(e,t,r){e&&(e.texture?e.texture.resize(t,r):e.renderbuffer&&e.renderbuffer.resize(t,r),e.width=t,e.height=r)}r.oes_texture_half_float&&u.push("half float","float16"),r.oes_texture_float&&u.push("float","float32");var b=0,x={};function A(){this.id=b++,x[this.id]=this,this.framebuffer=e.createFramebuffer(),this.width=0,this.height=0,this.colorAttachments=[],this.depthAttachment=null,this.stencilAttachment=null,this.depthStencilAttachment=null}function S(e){e.colorAttachments.forEach(d),d(e.depthAttachment),d(e.stencilAttachment),d(e.depthStencilAttachment)}function E(t){var r=t.framebuffer;w(r,"must not double destroy framebuffer"),e.deleteFramebuffer(r),t.framebuffer=null,o.framebufferCount--,delete x[t.id]}function T(t){e.bindFramebuffer(36160,t.framebuffer);var r,i=t.colorAttachments;for(r=0;r=2,"invalid shape for framebuffer"),o=P[0],d=P[1]}else"radius"in e&&(o=d=e.radius),"width"in e&&(o=e.width),"height"in e&&(d=e.height);("color"in e||"colors"in e)&&Array.isArray(b=e.color||e.colors)&&w(1===b.length||r.webgl_draw_buffers,"multiple render targets not supported"),!b&&("colorCount"in e&&w((k=0|e.colorCount)>0,"invalid color buffer count"),"colorTexture"in e&&(x=!!e.colorTexture,A="rgba4"),"colorType"in e&&(E=e.colorType,x?(w(r.oes_texture_float||"float"!==E&&"float32"!==E,"you must enable OES_texture_float in order to use floating point framebuffer objects"),w(r.oes_texture_half_float||"half float"!==E&&"float16"!==E,"you must enable OES_texture_half_float in order to use 16-bit floating point framebuffer objects")):"half float"===E||"float16"===E?(w(r.ext_color_buffer_half_float,"you must enable EXT_color_buffer_half_float to use 16-bit render buffers"),A="rgba16f"):("float"===E||"float32"===E)&&(w(r.webgl_color_buffer_float,"you must enable WEBGL_color_buffer_float in order to use 32-bit floating point renderbuffers"),A="rgba32f"),w.oneOf(E,u,"invalid color type")),"colorFormat"in 
e&&(A=e.colorFormat,l.indexOf(A)>=0?x=!0:f.indexOf(A)>=0?x=!1:w.optional(function(){x?w.oneOf(e.colorFormat,l,"invalid color format for texture"):w.oneOf(e.colorFormat,f,"invalid color format for renderbuffer")}))),("depthTexture"in e||"depthStencilTexture"in e)&&w(!(D=!!(e.depthTexture||e.depthStencilTexture))||r.webgl_depth_texture,"webgl_depth_texture extension not supported"),"depth"in e&&("boolean"==typeof e.depth?m=e.depth:(_=e.depth,g=!1)),"stencil"in e&&("boolean"==typeof e.stencil?g=e.stencil:(O=e.stencil,m=!1)),"depthStencil"in e&&("boolean"==typeof e.depthStencil?m=g=e.depthStencil:(C=e.depthStencil,m=!1,g=!1))}else o=d=1;var M=null,I=null,j=null,z=null;if(Array.isArray(b))M=b.map(h);else if(b)M=[h(b)];else for(i=0,M=Array(k);i=0||M[i].renderbuffer&&e_.indexOf(M[i].renderbuffer._renderbuffer.format)>=0,"framebuffer color attachment "+i+" is invalid"),M[i]&&M[i].texture){var R=eT[M[i].texture._texture.format]*ek[M[i].texture._texture.type];null===B?B=R:w(B===R,"all color attachments much have the same number of bits per pixel.")}return p(I,o,d),w(!I||I.texture&&6402===I.texture._texture.format||I.renderbuffer&&33189===I.renderbuffer._renderbuffer.format,"invalid depth attachment for framebuffer object"),p(j,o,d),w(!j||j.renderbuffer&&36168===j.renderbuffer._renderbuffer.format,"invalid stencil attachment for framebuffer object"),p(z,o,d),w(!z||z.texture&&34041===z.texture._texture.format||z.renderbuffer&&34041===z.renderbuffer._renderbuffer.format,"invalid depth-stencil attachment for framebuffer object"),S(a),a.width=o,a.height=d,a.colorAttachments=M,a.depthAttachment=I,a.stencilAttachment=j,a.depthStencilAttachment=z,c.color=M.map(v),c.depth=v(I),c.stencil=v(j),c.depthStencil=v(z),c.width=a.width,c.height=a.height,T(a),c}return o.framebufferCount++,c(e,i),t(c,{resize:function(e,t){w(s.next!==a,"can not resize a framebuffer which is currently in use");var r=Math.max(0|e,1),n=Math.max(0|t||r,1);if(r===a.width&&n===a.height)return c;for(var 
i=a.colorAttachments,o=0;oa.indexOf(s.next),"can not update framebuffer which is currently in use");var n,f,c={color:null},d=0,p=null,m="rgba",h="uint8",y=1;if("number"==typeof e)d=0|e;else if(e){if(w.type(e,"object","invalid arguments for framebuffer"),"shape"in e){var v=e.shape;w(Array.isArray(v)&&v.length>=2,"invalid shape for framebuffer"),w(v[0]===v[1],"cube framebuffer must be square"),d=v[0]}else"radius"in e&&(d=0|e.radius),"width"in e?(d=0|e.width,"height"in e&&w(e.height===d,"must be square")):"height"in e&&(d=0|e.height);("color"in e||"colors"in e)&&Array.isArray(p=e.color||e.colors)&&w(1===p.length||r.webgl_draw_buffers,"multiple render targets not supported"),!p&&("colorCount"in e&&w((y=0|e.colorCount)>0,"invalid color buffer count"),"colorType"in e&&(w.oneOf(e.colorType,u,"invalid color type"),h=e.colorType),"colorFormat"in e&&(m=e.colorFormat,w.oneOf(e.colorFormat,l,"invalid color format for texture"))),"depth"in e&&(c.depth=e.depth),"stencil"in e&&(c.stencil=e.stencil),"depthStencil"in e&&(c.depthStencil=e.depthStencil)}else d=1;if(p)if(Array.isArray(p))for(n=0,f=[];n0&&(c.depth=a[0].depth,c.stencil=a[0].stencil,c.depthStencil=a[0].depthStencil),a[n]?a[n](c):a[n]=k(c)}return t(o,{width:d,height:d,color:f})}return o(e),t(o,{faces:a,resize:function(e){var t,r=0|e;if(w(r>0&&r<=n.maxCubeMapSize,"invalid radius for cube fbo"),r===o.width)return o;var i=o.color;for(t=0;t0&&(r.push(t,"="),r.push.apply(r,eP(arguments)),r.push(";")),t},toString:function(){return eM([n.length>0?"var "+n.join(",")+";":"",eM(r)])}})}function a(){var e=i(),r=i(),n=e.toString,a=r.toString;function o(t,n){r(t,n,"=",e.def(t,n),";")}return t(function(){e.apply(e,eP(arguments))},{def:e.def,entry:e,exit:r,save:o,set:function(t,r,n){o(t,r),e(t,r,"=",n,";")},toString:function(){return n()+a()}})}var o=i(),s={};return{global:o,link:function(t){for(var i=0;i1){for(var k=[],_=[],O=0;O=0","missing vertex count")})):(s=f.def(m,".",tn),w.optional(function(){e.assert(f,s+">=0","missing vertex 
count")})),s);if("number"==typeof S){if(0===S)return}else r("if(",S,"){"),r.exit("}");g&&(u=v(ta),c=e.instancing);var E=y+".type",T=h.elements&&tT(h.elements)&&!h.vaoActive;function k(){function e(){r(c,".drawElementsInstancedANGLE(",[b,S,E,A+"<<(("+E+"-5121)>>1)",u],");")}function t(){r(c,".drawArraysInstancedANGLE(",[b,A,S,u],");")}y&&"null"!==y?T?e():(r("if(",y,"){"),e(),r("}else{"),t(),r("}")):t()}function _(){function e(){r(p+".drawElements("+[b,S,E,A+"<<(("+E+"-5121)>>1)"]+");")}function t(){r(p+".drawArrays("+[b,A,S]+");")}y&&"null"!==y?T?e():(r("if(",y,"){"),e(),r("}else{"),t(),r("}")):t()}g&&("number"!=typeof u||u>=0)?"string"==typeof u?(r("if(",u,">0){"),k(),r("}else if(",u,"<0){"),_(),r("}")):k():_()}function H(e,t,r,n,i){var a=R(),o=a.proc("body",i);return w.optional(function(){a.commandStr=t.commandStr,a.command=a.link(t.commandStr)}),g&&(a.instancing=o.def(a.shared.extensions,".angle_instanced_arrays")),e(a,o,r,n),a.compile().body}function Y(e,t,r,n){N(e,t),r.useVAO?r.drawVAO?t(e.shared.vao,".setVAO(",r.drawVAO.append(e,t),");"):t(e.shared.vao,".setVAO(",e.shared.vao,".targetVAO);"):(t(e.shared.vao,".setVAO(null);"),Z(e,t,r,n.attributes,function(){return!0})),G(e,t,r,n.uniforms,function(){return!0},!1),q(e,t,t,r)}function Q(e,t,r,n){function i(){return!0}e.batchId="a1",N(e,t),Z(e,t,r,n.attributes,i),G(e,t,r,n.uniforms,i,!1),q(e,t,t,r)}function X(e,t,r,n){N(e,t);var i=r.contextDep,a=t.def(),o=t.def();e.shared.props=o,e.batchId=a;var s=e.scope(),l=e.scope();function f(e){return e.contextDep&&i||e.propDep}function 
u(e){return!f(e)}if(t(s.entry,"for(",a,"=0;",a,"<","a1",";++",a,"){",o,"=","a0","[",a,"];",l,"}",s.exit),r.needsContext&&L(e,l,r.context),r.needsFramebuffer&&F(e,l,r.framebuffer),$(e,l,r.state,f),r.profile&&f(r.profile)&&U(e,l,r,!1,!0),n)r.useVAO?r.drawVAO?f(r.drawVAO)?l(e.shared.vao,".setVAO(",r.drawVAO.append(e,l),");"):s(e.shared.vao,".setVAO(",r.drawVAO.append(e,s),");"):s(e.shared.vao,".setVAO(",e.shared.vao,".targetVAO);"):(s(e.shared.vao,".setVAO(null);"),Z(e,s,r,n.attributes,u),Z(e,l,r,n.attributes,f)),G(e,s,r,n.uniforms,u,!1),G(e,l,r,n.uniforms,f,!0),q(e,s,l,r);else{var c=e.global.def("{}"),d=r.shader.progVar.append(e,l),p=l.def(d,".id"),m=l.def(c,"[",p,"]");l(e.shared.gl,".useProgram(",d,".program);","if(!",m,"){",m,"=",c,"[",p,"]=",e.link(function(t){return H(Q,e,r,t,2)}),"(",d,");}",m,".call(this,a0[",a,"],",a,");")}}function ee(e,t,r){var n=t.static[r];if(n&&function(e){if(!("object"!=typeof e||J(e))){for(var t=Object.keys(e),r=0;r=0,'unknown parameter "'+t+'"',d.commandStr)})}t(P),t(I)});var j=function(e,t){var r=e.static;if("string"==typeof r[te]&&"string"==typeof r[e9]){if(Object.keys(t.dynamic).length>0)return null;var n=t.static,i=Object.keys(n);if(i.length>0&&"number"==typeof n[i[0]]){for(var a=[],o=0;o=0,"invalid "+e,r.commandStr)):l=!1,"height"in s?(o=0|s.height,w.command(o>=0,"invalid "+e,r.commandStr)):l=!1,new tE(!l&&t&&t.thisDep,!l&&t&&t.contextDep,!l&&t&&t.propDep,function(e,t){var r=e.shared.context,n=a;"width"in s||(n=t.def(r,".",tl,"-",f));var i=o;return"height"in s||(i=t.def(r,".",tf,"-",u)),[f,u,n,i]})}if(e in i){var c=i[e],d=t_(c,function(t,r){var n=t.invoke(r,c);w.optional(function(){t.assert(r,n+"&&typeof "+n+'==="object"',"invalid "+e)});var i=t.shared.context,a=r.def(n,".x|0"),o=r.def(n,".y|0"),s=r.def('"width" in ',n,"?",n,".width|0:","(",i,".",tl,"-",a,")"),l=r.def('"height" in ',n,"?",n,".height|0:","(",i,".",tf,"-",o,")");return w.optional(function(){t.assert(r,s+">=0&&"+l+">=0","invalid "+e)}),[a,o,s,l]});return 
t&&(d.thisDep=d.thisDep||t.thisDep,d.contextDep=d.contextDep||t.contextDep,d.propDep=d.propDep||t.propDep),d}return t?new tE(t.thisDep,t.contextDep,t.propDep,function(e,t){var r=e.shared.context;return[0,0,t.def(r,".",tl),t.def(r,".",tf)]}):null}var o=a(e4);if(o){var s=o;o=new tE(o.thisDep,o.contextDep,o.propDep,function(e,t){var r=s.append(e,t),n=e.shared.context;return t.set(n,"."+tu,r[2]),t.set(n,"."+tc,r[3]),r})}return{viewport:o,scissor_box:a(e6)}}(e,z,d),R=function(e,t){var r=e.static,n=e.dynamic,i={},a=!1,s=function(){if("vao"in r){var e=r.vao;return null!==e&&null===u.getVAO(e)&&(e=u.createVAO(e)),a=!0,i.vao=e,tk(function(t){var r=u.getVAO(e);return r?t.link(r):"null"})}if("vao"in n){a=!0;var t=n.vao;return t_(t,function(e,r){var n=e.invoke(r,t);return r.def(e.shared.vao+".getVAO("+n+")")})}return null}(),l=!1,f=function(){if(tt in r){var e=r[tt];if(i.elements=e,tA(e)){var f=i.elements=o.create(e,!0);e=o.getElements(f),l=!0}else e&&(e=o.getElements(e),l=!0,w.command(e,"invalid elements",t.commandStr));var u=tk(function(t,r){if(e){var n=t.link(e);return t.ELEMENTS=n,n}return t.ELEMENTS=null,null});return u.value=e,u}if(tt in n){l=!0;var c=n[tt];return t_(c,function(e,t){var r=e.shared,n=r.isBufferArgs,i=r.elements,a=e.invoke(t,c),o=t.def("null"),s=t.def(n,"(",a,")"),l=e.cond(s).then(o,"=",i,".createStream(",a,");").else(o,"=",i,".getElements(",a,");");return w.optional(function(){e.assert(l.else,"!"+a+"||"+o,"invalid elements")}),t.entry(l),t.exit(e.cond(s).then(i,".destroyStream(",o,");")),e.ELEMENTS=o,o})}return a?new tE(s.thisDep,s.contextDep,s.propDep,function(e,t){return t.def(e.shared.vao+".currentVAO?"+e.shared.elements+".getElements("+e.shared.vao+".currentVAO.elements):null")}):null}();function c(e,o){if(e in r){var f=0|r[e];return o?i.offset=f:i.instances=f,w.command(!o||f>=0,"invalid "+e,t.commandStr),tk(function(e,t){return o&&(e.OFFSET=f),f})}if(e in n){var u=n[e];return t_(u,function(t,r){var n=t.invoke(r,u);return 
o&&(t.OFFSET=n,w.optional(function(){t.assert(r,n+">=0","invalid "+e)})),n})}if(o){if(l)return tk(function(e,t){return e.OFFSET=0,0});else if(a)return new tE(s.thisDep,s.contextDep,s.propDep,function(e,t){return t.def(e.shared.vao+".currentVAO?"+e.shared.vao+".currentVAO.offset:0")})}else if(a)return new tE(s.thisDep,s.contextDep,s.propDep,function(e,t){return t.def(e.shared.vao+".currentVAO?"+e.shared.vao+".currentVAO.instances:-1")});return null}var d=c(ti,!0),p=function(){if(tr in r){var e=r[tr];return i.primitive=e,w.commandParameter(e,K,"invalid primitve",t.commandStr),tk(function(t,r){return K[e]})}if(tr in n){var o=n[tr];return t_(o,function(e,t){var r=e.constants.primTypes,n=e.invoke(t,o);return w.optional(function(){e.assert(t,n+" in "+r,"invalid primitive, must be one of "+Object.keys(K))}),t.def(r,"[",n,"]")})}if(l)if(!tT(f))return new tE(f.thisDep,f.contextDep,f.propDep,function(e,t){var r=e.ELEMENTS;return t.def(r,"?",r,".primType:",4)});else if(f.value)return tk(function(e,t){return t.def(e.ELEMENTS,".primType")});else return tk(function(){return 4});return a?new tE(s.thisDep,s.contextDep,s.propDep,function(e,t){return t.def(e.shared.vao+".currentVAO?"+e.shared.vao+".currentVAO.primitive:4")}):null}(),m=function(){if(tn in r){var e=0|r[tn];return i.count=e,w.command("number"==typeof e&&e>=0,"invalid vertex count",t.commandStr),tk(function(){return e})}if(tn in n){var o=n[tn];return t_(o,function(e,t){var r=e.invoke(t,o);return w.optional(function(){e.assert(t,"typeof "+r+'==="number"&&'+r+">=0&&"+r+"===("+r+"|0)","invalid vertex count")}),r})}if(l)if(tT(f))if(f)if(d)return new tE(d.thisDep,d.contextDep,d.propDep,function(e,t){var r=t.def(e.ELEMENTS,".vertCount-",e.OFFSET);return w.optional(function(){e.assert(t,r+">=0","invalid vertex offset/element buffer too small")}),r});else return tk(function(e,t){return t.def(e.ELEMENTS,".vertCount")});else{var u=tk(function(){return -1});return w.optional(function(){u.MISSING=!0}),u}else{var c=new 
tE(f.thisDep||d.thisDep,f.contextDep||d.contextDep,f.propDep||d.propDep,function(e,t){var r=e.ELEMENTS;return e.OFFSET?t.def(r,"?",r,".vertCount-",e.OFFSET,":-1"):t.def(r,"?",r,".vertCount:-1")});return w.optional(function(){c.DYNAMIC=!0}),c}return a?new tE(s.thisDep,s.contextDep,s.propDep,function(e,t){return t.def(e.shared.vao,".currentVAO?",e.shared.vao,".currentVAO.count:-1")}):null}();return{elements:f,primitive:p,count:m,instances:c(ta,!1),offset:d,vao:s,vaoActive:a,elementsActive:l,static:i}}(e,d),L=(p=e.static,m=e.dynamic,h={},T.forEach(function(e){var t=D(e);function r(r,n){if(e in p){var i=r(p[e]);h[t]=tk(function(){return i})}else if(e in m){var a=m[e];h[t]=t_(a,function(e,t){return n(e,t,e.invoke(t,a))})}}switch(e){case eU:case ez:case ej:case eJ:case eF:case e5:case eH:case eY:case eQ:case eN:return r(function(t){return w.commandType(t,"boolean",e,d.commandStr),t},function(t,r,n){return w.optional(function(){t.assert(r,"typeof "+n+'==="boolean"',"invalid flag "+e,t.commandStr)}),n});case eV:return r(function(t){return w.commandParameter(t,tg,"invalid "+e,d.commandStr),tg[t]},function(t,r,n){var i=t.constants.compareFuncs;return w.optional(function(){t.assert(r,n+" in "+i,"invalid "+e+", must be one of "+Object.keys(tg))}),r.def(i,"[",n,"]")});case e$:return r(function(e){return w.command(J(e)&&2===e.length&&"number"==typeof e[0]&&"number"==typeof e[1]&&e[0]<=e[1],"depth range is 2d array",d.commandStr),e},function(e,t,r){return w.optional(function(){e.assert(t,e.shared.isArrayLike+"("+r+")&&"+r+".length===2&&typeof "+r+'[0]==="number"&&typeof '+r+'[1]==="number"&&'+r+"[0]<="+r+"[1]","depth range must be a 2d array")}),[t.def("+",r,"[0]"),t.def("+",r,"[1]")]});case eL:return r(function(e){w.commandType(e,"object","blend.func",d.commandStr);var r="srcRGB"in e?e.srcRGB:e.src,n="srcAlpha"in e?e.srcAlpha:e.src,i="dstRGB"in e?e.dstRGB:e.dst,a="dstAlpha"in e?e.dstAlpha:e.dst;return 
w.commandParameter(r,ty,t+".srcRGB",d.commandStr),w.commandParameter(n,ty,t+".srcAlpha",d.commandStr),w.commandParameter(i,ty,t+".dstRGB",d.commandStr),w.commandParameter(a,ty,t+".dstAlpha",d.commandStr),w.command(-1===tv.indexOf(r+", "+i),"unallowed blending combination (srcRGB, dstRGB) = ("+r+", "+i+")",d.commandStr),[ty[r],ty[i],ty[n],ty[a]]},function(t,r,n){var i=t.constants.blendFuncs;function a(a,o){var s=r.def('"',a,o,'" in ',n,"?",n,".",a,o,":",n,".",a);return w.optional(function(){t.assert(r,s+" in "+i,"invalid "+e+"."+a+o+", must be one of "+Object.keys(ty))}),s}w.optional(function(){t.assert(r,n+"&&typeof "+n+'==="object"',"invalid blend func, must be an object")});var o=a("src","RGB"),s=a("dst","RGB");w.optional(function(){var e=t.constants.invalidBlendCombinations;t.assert(r,e+".indexOf("+o+'+", "+'+s+") === -1 ","unallowed blending combination for (srcRGB, dstRGB)")});var l=r.def(i,"[",o,"]"),f=r.def(i,"[",a("src","Alpha"),"]");return[l,r.def(i,"[",s,"]"),f,r.def(i,"[",a("dst","Alpha"),"]")]});case eR:return r(function(t){return"string"==typeof t?(w.commandParameter(t,v,"invalid "+e,d.commandStr),[v[t],v[t]]):"object"==typeof t?(w.commandParameter(t.rgb,v,e+".rgb",d.commandStr),w.commandParameter(t.alpha,v,e+".alpha",d.commandStr),[v[t.rgb],v[t.alpha]]):void w.commandRaise("invalid blend.equation",d.commandStr)},function(t,r,n){var i=t.constants.blendEquations,a=r.def(),o=r.def(),s=t.cond("typeof ",n,'==="string"');return w.optional(function(){function r(e,r,n){t.assert(e,n+" in "+i,"invalid "+r+", must be one of "+Object.keys(v))}r(s.then,e,n),t.assert(s.else,n+"&&typeof "+n+'==="object"',"invalid "+e),r(s.else,e+".rgb",n+".rgb"),r(s.else,e+".alpha",n+".alpha")}),s.then(a,"=",o,"=",i,"[",n,"];"),s.else(a,"=",i,"[",n,".rgb];",o,"=",i,"[",n,".alpha];"),r(s),[a,o]});case eB:return r(function(e){return w.command(J(e)&&4===e.length,"blend.color must be a 4d array",d.commandStr),M(4,function(t){return+e[t]})},function(e,t,r){return 
w.optional(function(){e.assert(t,e.shared.isArrayLike+"("+r+")&&"+r+".length===4","blend.color must be a 4d array")}),M(4,function(e){return t.def("+",r,"[",e,"]")})});case e0:return r(function(e){return w.commandType(e,"number",t,d.commandStr),0|e},function(e,t,r){return w.optional(function(){e.assert(t,"typeof "+r+'==="number"',"invalid stencil.mask")}),t.def(r,"|0")});case e1:return r(function(r){w.commandType(r,"object",t,d.commandStr);var n=r.cmp||"keep",i=r.ref||0,a="mask"in r?r.mask:-1;return w.commandParameter(n,tg,e+".cmp",d.commandStr),w.commandType(i,"number",e+".ref",d.commandStr),w.commandType(a,"number",e+".mask",d.commandStr),[tg[n],i,a]},function(e,t,r){var n=e.constants.compareFuncs;return w.optional(function(){function i(){e.assert(t,Array.prototype.join.call(arguments,""),"invalid stencil.func")}i(r+"&&typeof ",r,'==="object"'),i('!("cmp" in ',r,")||(",r,".cmp in ",n,")")}),[t.def('"cmp" in ',r,"?",n,"[",r,".cmp]",":",7680),t.def(r,".ref|0"),t.def('"mask" in ',r,"?",r,".mask|0:-1")]});case e2:case e3:return r(function(r){w.commandType(r,"object",t,d.commandStr);var n=r.fail||"keep",i=r.zfail||"keep",a=r.zpass||"keep";return w.commandParameter(n,tb,e+".fail",d.commandStr),w.commandParameter(i,tb,e+".zfail",d.commandStr),w.commandParameter(a,tb,e+".zpass",d.commandStr),[e===e3?1029:1028,tb[n],tb[i],tb[a]]},function(t,r,n){var i=t.constants.stencilOps;function a(a){return w.optional(function(){t.assert(r,'!("'+a+'" in '+n+")||("+n+"."+a+" in "+i+")","invalid "+e+"."+a+", must be one of "+Object.keys(tb))}),r.def('"',a,'" in ',n,"?",i,"[",n,".",a,"]:",7680)}return w.optional(function(){t.assert(r,n+"&&typeof "+n+'==="object"',"invalid "+e)}),[e===e3?1029:1028,a("fail"),a("zfail"),a("zpass")]});case eK:return r(function(e){w.commandType(e,"object",t,d.commandStr);var r=0|e.factor,n=0|e.units;return w.commandType(r,"number",t+".factor",d.commandStr),w.commandType(n,"number",t+".units",d.commandStr),[r,n]},function(t,r,n){return 
w.optional(function(){t.assert(r,n+"&&typeof "+n+'==="object"',"invalid "+e)}),[r.def(n,".factor|0"),r.def(n,".units|0")]});case eZ:return r(function(e){var r=0;return"front"===e?r=1028:"back"===e&&(r=1029),w.command(!!r,t,d.commandStr),r},function(e,t,r){return w.optional(function(){e.assert(t,r+'==="front"||'+r+'==="back"',"invalid cull.face")}),t.def(r,'==="front"?',1028,":",1029)});case eq:return r(function(e){return w.command("number"==typeof e&&e>=i.lineWidthDims[0]&&e<=i.lineWidthDims[1],"invalid line width, must be a positive number between "+i.lineWidthDims[0]+" and "+i.lineWidthDims[1],d.commandStr),e},function(e,t,r){return w.optional(function(){e.assert(t,"typeof "+r+'==="number"&&'+r+">="+i.lineWidthDims[0]+"&&"+r+"<="+i.lineWidthDims[1],"invalid line width")}),r});case eG:return r(function(e){return w.commandParameter(e,tw,t,d.commandStr),tw[e]},function(e,t,r){return w.optional(function(){e.assert(t,r+'==="cw"||'+r+'==="ccw"',"invalid frontFace, must be one of cw,ccw")}),t.def(r+'==="cw"?2304:2305')});case eW:return r(function(e){return w.command(J(e)&&4===e.length,"color.mask must be length 4 array",d.commandStr),e.map(function(e){return!!e})},function(e,t,r){return w.optional(function(){e.assert(t,e.shared.isArrayLike+"("+r+")&&"+r+".length===4","invalid color.mask")}),M(4,function(e){return"!!"+r+"["+e+"]"})});case eX:return r(function(e){w.command("object"==typeof e&&e,t,d.commandStr);var r="value"in e?e.value:1,n=!!e.invert;return w.command("number"==typeof r&&r>=0&&r<=1,"sample.coverage.value must be a number between 0 and 1",d.commandStr),[r,n]},function(e,t,r){return w.optional(function(){e.assert(t,r+"&&typeof "+r+'==="object"',"invalid sample.coverage")}),[t.def('"value" in ',r,"?+",r,".value:1"),t.def("!!",r,".invert")]})}}),h),F=function(e,t,n){var i,a=e.static,o=e.dynamic;function s(e){if(e in a){var t=r.id(a[e]);w.optional(function(){c.shader(tx[e],t,w.guessCommand())});var n=tk(function(){return t});return n.id=t,n}if(e in o){var 
i=o[e];return t_(i,function(t,r){var n=t.invoke(r,i),a=r.def(t.shared.strings,".id(",n,")");return w.optional(function(){r(t.shared.shader,".shader(",tx[e],",",a,",",t.command,");")}),a})}return null}var l=s(te),f=s(e9),u=null;return tT(l)&&tT(f)?(u=c.program(f.id,l.id,null,n),i=tk(function(e,t){return e.link(u)})):i=new tE(l&&l.thisDep||f&&f.thisDep,l&&l.contextDep||f&&f.contextDep,l&&l.propDep||f&&f.propDep,function(e,t){var r,n,i=e.shared.shader;r=l?l.append(e,t):t.def(i,".",te),n=f?f.append(e,t):t.def(i,".",e9);var a=i+".program("+n+","+r;return w.optional(function(){a+=","+e.command}),t.def(a+")")}),{frag:l,vert:f,progVar:i,program:u}}(e,0,j);function V(e){var t=B[e];t&&(L[e]=t)}V(e4),V(D(e6));var $=Object.keys(L).length>0,N={framebuffer:z,draw:R,shader:F,state:L,dirty:$,scopeVAO:null,drawVAO:null,useVAO:!1,attributes:{}};if(N.profile=function(e){var t,r=e.static,n=e.dynamic;if(e8 in r){var i=!!r[e8];(t=tk(function(e,t){return i})).enable=i}else if(e8 in n){var a=n[e8];t=t_(a,function(e,t){return e.invoke(t,a)})}return t}(e,d),b=s.static,x=s.dynamic,A={},Object.keys(b).forEach(function(e){var t,r=b[e];if("number"==typeof r||"boolean"==typeof r)t=tk(function(){return r});else if("function"==typeof r){var n=r._reglType;"texture2d"===n||"textureCube"===n?t=tk(function(e){return e.link(r)}):"framebuffer"===n||"framebufferCube"===n?(w.command(r.color.length>0,'missing color attachment for framebuffer sent to uniform "'+e+'"',d.commandStr),t=tk(function(e){return e.link(r.color[0])})):w.commandRaise('invalid data for uniform "'+e+'"',d.commandStr)}else J(r)?t=tk(function(t){return t.global.def("[",M(r.length,function(n){return w.command("number"==typeof r[n]||"boolean"==typeof r[n],"invalid uniform "+e,t.commandStr),r[n]}),"]")}):w.commandRaise('invalid or missing data for uniform "'+e+'"',d.commandStr);t.value=r,A[e]=t}),Object.keys(x).forEach(function(e){var t=x[e];A[e]=t_(t,function(e,r){return 
e.invoke(r,t)})}),N.uniforms=A,N.drawVAO=N.scopeVAO=R.vao,!N.drawVAO&&F.program&&!j&&n.angle_instanced_arrays&&R.static.elements){var U=!0,Z=F.program.attributes.map(function(e){var r=t.static[e];return U=U&&!!r,r});if(U&&Z.length>0){var G=u.getVAO(u.createVAO({attributes:Z,elements:R.static.elements}));N.drawVAO=new tE(null,null,null,function(e,t){return e.link(G)}),N.useVAO=!0}}return j?N.useVAO=!0:(S=t.static,E=t.dynamic,k={},Object.keys(S).forEach(function(e){var t=S[e],n=r.id(e),i=new y;if(tA(t))i.state=1,i.buffer=a.getBuffer(a.create(t,34962,!1,!0)),i.type=0;else{var o=a.getBuffer(t);if(o)i.state=1,i.buffer=o,i.type=0;else if(w.command("object"==typeof t&&t,"invalid data for attribute "+e,d.commandStr),"constant"in t){var s=t.constant;i.buffer="null",i.state=2,"number"==typeof s?i.x=s:(w.command(J(s)&&s.length>0&&s.length<=4,"invalid constant for attribute "+e,d.commandStr),eI.forEach(function(e,t){t=0,'invalid offset for attribute "'+e+'"',d.commandStr);var f=0|t.stride;w.command(f>=0&&f<256,'invalid stride for attribute "'+e+'", must be integer betweeen [0, 255]',d.commandStr);var u=0|t.size;w.command(!("size"in t)||u>0&&u<=4,'invalid size for attribute "'+e+'", must be 1,2,3,4',d.commandStr);var c=!!t.normalized,p=0;"type"in t&&(w.commandParameter(t.type,W,"invalid type for attribute "+e,d.commandStr),p=W[t.type]);var m=0|t.divisor;w.optional(function(){"divisor"in t&&(w.command(0===m||g,'cannot specify divisor for attribute "'+e+'", instancing not supported',d.commandStr),w.command(m>=0,'invalid divisor for attribute "'+e+'"',d.commandStr));var r=d.commandStr,n=["buffer","offset","divisor","normalized","type","size","stride"];Object.keys(t).forEach(function(t){w.command(n.indexOf(t)>=0,'unknown parameter "'+t+'" for attribute pointer "'+e+'" (valid parameters are '+n+")",r)})}),i.buffer=o,i.state=1,i.size=u,i.normalized=c,i.type=p||o.dtype,i.offset=l,i.stride=f,i.divisor=m}}k[e]=tk(function(e,t){var r=e.attribCache;if(n in r)return r[n];var 
a={isStream:!1};return Object.keys(i).forEach(function(e){a[e]=i[e]}),i.buffer&&(a.buffer=e.link(i.buffer),a.type=a.type||a.buffer+".dtype"),r[n]=a,a})}),Object.keys(E).forEach(function(e){var t=E[e];k[e]=t_(t,function(r,n){var i=r.invoke(n,t),a=r.shared,o=r.constants,s=a.isBufferArgs,l=a.buffer;w.optional(function(){r.assert(n,i+"&&(typeof "+i+'==="object"||typeof '+i+'==="function")&&('+s+"("+i+")||"+l+".getBuffer("+i+")||"+l+".getBuffer("+i+".buffer)||"+s+"("+i+'.buffer)||("constant" in '+i+"&&(typeof "+i+'.constant==="number"||'+a.isArrayLike+"("+i+".constant))))",'invalid dynamic attribute "'+e+'"')});var f={isStream:n.def(!1)},u=new y;u.state=1,Object.keys(u).forEach(function(e){f[e]=n.def(""+u[e])});var c=f.buffer,d=f.type;function p(e){n(f[e],"=",i,".",e,"|0;")}return n("if(",s,"(",i,")){",f.isStream,"=true;",c,"=",l,".createStream(",34962,",",i,");",d,"=",c,".dtype;","}else{",c,"=",l,".getBuffer(",i,");","if(",c,"){",d,"=",c,".dtype;",'}else if("constant" in ',i,"){",f.state,"=",2,";","if(typeof "+i+'.constant === "number"){',f[eI[0]],"=",i,".constant;",eI.slice(1).map(function(e){return f[e]}).join("="),"=0;","}else{",eI.map(function(e,t){return f[e]+"="+i+".constant.length>"+t+"?"+i+".constant["+t+"]:0;"}).join(""),"}}else{","if(",s,"(",i,".buffer)){",c,"=",l,".createStream(",34962,",",i,".buffer);","}else{",c,"=",l,".getBuffer(",i,".buffer);","}",d,'="type" in ',i,"?",o.glTypes,"[",i,".type]:",c,".dtype;",f.normalized,"=!!",i,".normalized;"),p("size"),p("offset"),p("stride"),p("divisor"),n("}}"),n.exit("if(",f.isStream,"){",l,".destroyStream(",c,");","}"),f})}),N.attributes=k),_=f.static,O=f.dynamic,C={},Object.keys(_).forEach(function(e){var t=_[e];C[e]=tk(function(e,r){return"number"==typeof t||"boolean"==typeof t?""+t:e.link(t)})}),Object.keys(O).forEach(function(e){var t=O[e];C[e]=t_(t,function(e,r){return e.invoke(r,t)})}),N.context=C,N}(e,s,f,d,m);!function(e,t){var 
r=e.proc("draw",1);N(e,r),L(e,r,t.context),F(e,r,t.framebuffer),V(e,r,t),$(e,r,t.state),U(e,r,t,!1,!0);var n=t.shader.progVar.append(e,r);if(r(e.shared.gl,".useProgram(",n,".program);"),t.shader.program)Y(e,r,t,t.shader.program);else{r(e.shared.vao,".setVAO(null);");var i=e.global.def("{}"),a=r.def(n,".id"),o=r.def(i,"[",a,"]");r(e.cond(o).then(o,".call(this,a0);").else(o,"=",i,"[",a,"]=",e.link(function(r){return H(Y,e,t,r,1)}),"(",n,");",o,".call(this,a0);"))}Object.keys(t.state).length>0&&r(e.shared.current,".dirty=true;"),e.shared.vao&&r(e.shared.vao,".setVAO(null);")}(m,h);var b=m.proc("scope",3);m.batchId="a2";var x=m.shared,A=x.current;function S(e){var t=h.shader[e];t&&b.set(x.shader,"."+e,t.append(m,b))}return L(m,b,h.context),h.framebuffer&&h.framebuffer.append(m,b),tS(Object.keys(h.state)).forEach(function(e){var t=h.state[e].append(m,b);J(t)?t.forEach(function(t,r){b.set(m.next[e],"["+r+"]",t)}):b.set(x.next,"."+e,t)}),U(m,b,h,!0,!0),[tt,ti,tn,ta,tr].forEach(function(e){var t=h.draw[e];t&&b.set(x.draw,"."+e,""+t.append(m,b))}),Object.keys(h.uniforms).forEach(function(e){var t=h.uniforms[e].append(m,b);Array.isArray(t)&&(t="["+t.join()+"]"),b.set(x.uniforms,"["+r.id(e)+"]",t)}),Object.keys(h.attributes).forEach(function(e){var t=h.attributes[e].append(m,b),r=m.scopeAttrib(e);Object.keys(new y).forEach(function(e){b.set(r,"."+e,t[e])})}),h.scopeVAO&&b.set(x.vao,".targetVAO",h.scopeVAO.append(m,b)),S(e9),S(te),Object.keys(h.state).length>0&&(b(A,".dirty=true;"),b.exit(A,".dirty=true;")),b("a1(",m.shared.context,",a0,",m.batchId,");"),!function(e,t){var r=e.proc("batch",2);e.batchId="0",N(e,r);var n=!1,i=!0;Object.keys(t.context).forEach(function(e){n=n||t.context[e].propDep}),n||(L(e,r,t.context),i=!1);var a=t.framebuffer,o=!1;function s(e){return 
e.contextDep&&n||e.propDep}a?(a.propDep?n=o=!0:a.contextDep&&n&&(o=!0),o||F(e,r,a)):F(e,r,null),t.state.viewport&&t.state.viewport.propDep&&(n=!0),V(e,r,t),$(e,r,t.state,function(e){return!s(e)}),t.profile&&s(t.profile)||U(e,r,t,!1,"a1"),t.contextDep=n,t.needsContext=i,t.needsFramebuffer=o;var l=t.shader.progVar;if(l.contextDep&&n||l.propDep)X(e,r,t,null);else{var f=l.append(e,r);if(r(e.shared.gl,".useProgram(",f,".program);"),t.shader.program)X(e,r,t,t.shader.program);else{r(e.shared.vao,".setVAO(null);");var u=e.global.def("{}"),c=r.def(f,".id"),d=r.def(u,"[",c,"]");r(e.cond(d).then(d,".call(this,a0,a1);").else(d,"=",u,"[",c,"]=",e.link(function(r){return H(X,e,t,r,2)}),"(",f,");",d,".call(this,a0,a1);"))}}Object.keys(t.state).length>0&&r(e.shared.current,".dirty=true;"),e.shared.vao&&r(e.shared.vao,".setVAO(null);")}(m,h),t(m.compile(),{destroy:function(){h.shader.program.destroy()}})}}}(o,u,d,b,x,A,0,N,{},E,I,g,v,p,a),Q=function(t,r,n,i,a,o,s){function l(l){null===r.next?(w(a.preserveDrawingBuffer,'you must create a webgl context with "preserveDrawingBuffer":true in order to read pixels from the drawing buffer'),f=5121):(w(null!==r.next.colorAttachments[0].texture,"You cannot read from a renderbuffer"),f=r.next.colorAttachments[0].texture._texture.type,w.optional(function(){o.oes_texture_float?(w(5121===f||5126===f,"Reading from a framebuffer is only allowed for the types 'uint8' and 'float'"),5126===f&&w(s.readFloat,"Reading 'float' values is not permitted in your browser. 
For a fallback, please see: https://www.npmjs.com/package/glsl-read-float")):w(5121===f,"Reading from a framebuffer is only allowed for the type 'uint8'")}));var f,u=0,c=0,d=i.framebufferWidth,p=i.framebufferHeight,m=null;e(l)?m=l:l&&(w.type(l,"object","invalid arguments to regl.read()"),u=0|l.x,c=0|l.y,w(u>=0&&u=0&&c0&&d+u<=i.framebufferWidth,"invalid width for read pixels"),w(p>0&&p+c<=i.framebufferHeight,"invalid height for read pixels"),n();var h=d*p*4;return m||(5121===f?m=new Uint8Array(h):5126===f&&(m=m||new Float32Array(h))),w.isTypedArray(m,"data buffer for regl.read() must be a typedarray"),w(m.byteLength>=h,"data buffer for regl.read() too small"),t.pixelStorei(3333,4),t.readPixels(u,c,d,p,6408,f,m),m}return function(e){var t;return e&&"framebuffer"in e?(r.setFBO({framebuffer:e.framebuffer},function(){t=l(e)}),t):l(e)}}(o,N,Y.procs.poll,v,s,d,b),ei=Y.next,ec=o.canvas,ed=[],ep=[],eA=[],to=[a.onDestroy],ts=null;function td(){if(0===ed.length){p&&p.update(),ts=null;return}ts=O.next(td),tN();for(var e=ed.length-1;e>=0;--e){var t=ed[e];t&&t(v,null,0)}o.flush(),p&&p.update()}function tI(){!ts&&ed.length>0&&(ts=O.next(td))}function tj(){ts&&(O.cancel(td),ts=null)}function tz(e){e.preventDefault(),l=!0,tj(),ep.forEach(function(e){e()})}function tB(e){o.getError(),l=!1,f.restore(),I.restore(),x.restore(),j.restore(),$.restore(),N.restore(),E.restore(),p&&p.restore(),Y.procs.refresh(),tI(),eA.forEach(function(e){e()})}function tR(e){function r(e,t){var r={},n={};return Object.keys(e).forEach(function(i){var a=e[i];if(k(a)){n[i]=_(a,i);return}if(t&&Array.isArray(a)){for(var o=0;o0)return c.call(this,function(e){for(;p.length=0,"cannot cancel a frame twice"),ed[t]=function e(){var t=tM(ed,e);ed[t]=ed[ed.length-1],ed.length-=1,ed.length<=0&&tj()}}}}function t$(){var 
e=ei.viewport,t=ei.scissor_box;e[0]=e[1]=t[0]=t[1]=0,v.viewportWidth=v.framebufferWidth=v.drawingBufferWidth=e[2]=t[2]=o.drawingBufferWidth,v.viewportHeight=v.framebufferHeight=v.drawingBufferHeight=e[3]=t[3]=o.drawingBufferHeight}function tN(){v.tick+=1,v.time=tU(),t$(),Y.procs.poll()}function tW(){j.refresh(),t$(),Y.procs.refresh(),p&&p.update()}function tU(){return(C()-m)/1e3}tW();var tZ=t(tR,{clear:function(e){if(w("object"==typeof e&&e,"regl.clear() takes an object as input"),"framebuffer"in e)if(e.framebuffer&&"framebufferCube"===e.framebuffer_reglType)for(var r=0;r<6;++r)tL(t({framebuffer:e.framebuffer.faces[r]},e),tF);else tL(e,tF);else tF(null,e)},prop:T.bind(null,1),context:T.bind(null,2),this:T.bind(null,3),draw:tR({}),buffer:function(e){return x.create(e,34962,!1,!1)},elements:function(e){return A.create(e,!1)},texture:j.create2D,cube:j.createCube,renderbuffer:$.create,framebuffer:N.create,framebufferCube:N.createCube,vao:E.createVAO,attributes:s,frame:tV,on:function(e,t){var r;switch(w.type(t,"function","listener callback must be a function"),e){case"frame":return tV(t);case"lost":r=ep;break;case"restore":r=eA;break;case"destroy":r=to;break;default:w.raise("invalid event, must be one of frame,lost,restore,destroy")}return r.push(t),{cancel:function(){for(var e=0;e=0},read:Q,destroy:function(){ed.length=0,tj(),ec&&(ec.removeEventListener(tD,tz),ec.removeEventListener(tP,tB)),I.clear(),N.clear(),$.clear(),E.clear(),j.clear(),A.clear(),x.clear(),p&&p.clear(),to.forEach(function(e){e()})},_gl:o,_refresh:tW,poll:function(){tN(),p&&p.update()},now:tU,stats:c});return a.onDone(null,tZ),tZ}}()},42030,e=>{"use strict";let t,r=new window.BroadcastChannel("pub-sub-es"),n=(e,t)=>"string"==typeof e&&t?e.toLowerCase():e,i=(e,t)=>(r,i,a=1/0)=>{let o=n(r,t?.caseInsensitive),s=e[o]||[];return s.push({handler:i,times:+a||1/0}),e[o]=s,{event:o,handler:i}};function a(e,t){return function(r,i){let a,o;"object"==typeof r?(o=r.handler,a=r.event):(a=r,o=i);let 
s=e[n(a,t?.caseInsensitive)];if(!s)return;let l=s.findIndex(e=>e.handler===o);-1===l||l>=s.length||s.splice(l,1)}}let o=(e,t)=>{let i=a(e);return(...a)=>{let[o,s,l]=a,f=n(o,t?.caseInsensitive),u=e[f];if(!u)return;let c=[...u];for(let e of c)--e.times<1&&i(f,e.handler);let d=l?.async!==void 0?l.async:t?.async,p=()=>{for(let e of c)e.handler(s)};if(d?setTimeout(p,0):p(),t?.isGlobal&&!l?.isNoGlobalBroadcast)try{r.postMessage({event:f,news:s})}catch(e){if(e instanceof Error&&"DataCloneError"===e.name)console.warn(`Could not broadcast '${f.toString()}' globally. Payload is not clonable.`);else throw e}}},s=e=>()=>{for(let t of Object.keys(e))delete e[t]},l=()=>({}),f=l(),u={publish:o(f,{isGlobal:!0}),subscribe:i(f),unsubscribe:a(f),clear:s(f),stack:f};r.onmessage=({data:{event:e,news:t}})=>u.publish(e,t,{isNoGlobalBroadcast:!0});var c,d,p=e.i(25443);let m=e=>e<.5?4*e*e*e:(e-1)*(2*e-2)*(2*e-2)+1,h=e=>e,y=(e,t)=>{if(e===t)return!0;if(e.length!==t.length)return!1;let r=new Set(e),n=new Set(t);return r.size===n.size&&t.every(e=>r.has(e))},v=(e,t=e=>e)=>{let r=[];for(let n=0;n(t.forEach(t=>{let r=Object.keys(t).reduce((e,r)=>(e[r]=Object.getOwnPropertyDescriptor(t,r),e),{});Object.getOwnPropertySymbols(t).forEach(e=>{let n=Object.getOwnPropertyDescriptor(t,e);n.enumerable&&(r[e]=n)}),Object.defineProperties(e,r)}),e),b=(e,t)=>r=>g(r,{get[e](){return t}}),x=(e=1)=>new Promise(t=>{let r=0,n=()=>requestAnimationFrame(()=>{++r{let n,i=0;r=null===r?t:r;let a=(...t)=>{clearTimeout(n),n=setTimeout(()=>{i>0&&(e(...t),i=0)},r)},o=!1,s=(...r)=>{o?(i++,a(...r)):(e(...r),a(...r),o=!0,i=0,setTimeout(()=>{o=!1},t))};return s.reset=()=>{o=!1},s.cancel=()=>{clearTimeout(n)},s.now=(...t)=>e(...t),s};var A="undefined"!=typeof Float32Array?Float32Array:Array;function S(){var e=new A(16);return A!=Float32Array&&(e[1]=0,e[2]=0,e[3]=0,e[4]=0,e[6]=0,e[7]=0,e[8]=0,e[9]=0,e[11]=0,e[12]=0,e[13]=0,e[14]=0),e[0]=1,e[5]=1,e[10]=1,e[15]=1,e}function E(e){var t=new A(16);return 
t[0]=e[0],t[1]=e[1],t[2]=e[2],t[3]=e[3],t[4]=e[4],t[5]=e[5],t[6]=e[6],t[7]=e[7],t[8]=e[8],t[9]=e[9],t[10]=e[10],t[11]=e[11],t[12]=e[12],t[13]=e[13],t[14]=e[14],t[15]=e[15],t}function T(e,t){var r=t[0],n=t[1],i=t[2],a=t[3],o=t[4],s=t[5],l=t[6],f=t[7],u=t[8],c=t[9],d=t[10],p=t[11],m=t[12],h=t[13],y=t[14],v=t[15],g=r*s-n*o,b=r*l-i*o,x=r*f-a*o,w=n*l-i*s,A=n*f-a*s,S=i*f-a*l,E=u*h-c*m,T=u*y-d*m,k=u*v-p*m,_=c*y-d*h,O=c*v-p*h,C=d*v-p*y,D=g*C-b*O+x*_+w*k-A*T+S*E;return D?(D=1/D,e[0]=(s*C-l*O+f*_)*D,e[1]=(i*O-n*C-a*_)*D,e[2]=(h*S-y*A+v*w)*D,e[3]=(d*A-c*S-p*w)*D,e[4]=(l*k-o*C-f*T)*D,e[5]=(r*C-i*k+a*T)*D,e[6]=(y*x-m*S-v*b)*D,e[7]=(u*S-d*x+p*b)*D,e[8]=(o*O-s*k+f*E)*D,e[9]=(n*k-r*O-a*E)*D,e[10]=(m*A-h*x+v*g)*D,e[11]=(c*x-u*A-p*g)*D,e[12]=(s*T-o*_-l*E)*D,e[13]=(r*_-n*T+i*E)*D,e[14]=(h*b-m*w-y*g)*D,e[15]=(u*w-c*b+d*g)*D,e):null}function k(e,t,r){var n=t[0],i=t[1],a=t[2],o=t[3],s=t[4],l=t[5],f=t[6],u=t[7],c=t[8],d=t[9],p=t[10],m=t[11],h=t[12],y=t[13],v=t[14],g=t[15],b=r[0],x=r[1],w=r[2],A=r[3];return e[0]=b*n+x*s+w*c+A*h,e[1]=b*i+x*l+w*d+A*y,e[2]=b*a+x*f+w*p+A*v,e[3]=b*o+x*u+w*m+A*g,b=r[4],x=r[5],w=r[6],A=r[7],e[4]=b*n+x*s+w*c+A*h,e[5]=b*i+x*l+w*d+A*y,e[6]=b*a+x*f+w*p+A*v,e[7]=b*o+x*u+w*m+A*g,b=r[8],x=r[9],w=r[10],A=r[11],e[8]=b*n+x*s+w*c+A*h,e[9]=b*i+x*l+w*d+A*y,e[10]=b*a+x*f+w*p+A*v,e[11]=b*o+x*u+w*m+A*g,b=r[12],x=r[13],w=r[14],A=r[15],e[12]=b*n+x*s+w*c+A*h,e[13]=b*i+x*l+w*d+A*y,e[14]=b*a+x*f+w*p+A*v,e[15]=b*o+x*u+w*m+A*g,e}function _(e,t){return e[0]=1,e[1]=0,e[2]=0,e[3]=0,e[4]=0,e[5]=1,e[6]=0,e[7]=0,e[8]=0,e[9]=0,e[10]=1,e[11]=0,e[12]=t[0],e[13]=t[1],e[14]=t[2],e[15]=1,e}function O(e,t){return e[0]=t[0],e[1]=0,e[2]=0,e[3]=0,e[4]=0,e[5]=t[1],e[6]=0,e[7]=0,e[8]=0,e[9]=0,e[10]=t[2],e[11]=0,e[12]=0,e[13]=0,e[14]=0,e[15]=1,e}function C(e,t,r){var n=t[0],i=t[1],a=t[2],o=t[3];return e[0]=r[0]*n+r[4]*i+r[8]*a+r[12]*o,e[1]=r[1]*n+r[5]*i+r[9]*a+r[13]*o,e[2]=r[2]*n+r[6]*i+r[10]*a+r[14]*o,e[3]=r[3]*n+r[7]*i+r[11]*a+r[15]*o,e}Math.hypot||(Math.hypot=function(){for(var 
e=0,t=arguments.length;t--;)e+=arguments[t]*arguments[t];return Math.sqrt(e)}),c=new A(4),A!=Float32Array&&(c[0]=0,c[1]=0,c[2]=0,c[3]=0),d=new A(2),A!=Float32Array&&(d[0]=0,d[1]=0);let D=["pan","rotate"],P={alt:"altKey",cmd:"metaKey",ctrl:"ctrlKey",meta:"metaKey",shift:"shiftKey"};function M(e,t){if(!e)return e;t||(t=e);let r=e,n;do if(n=!1,!r.steiner&&(B(r,r.next)||0===z(r.prev,r,r.next))){if(N(r),(r=t=r.prev)===r.next)break;n=!0}else r=r.next;while(n||r!==t)return t}function I(e,t,r,n,i){return(e=((e=((e=((e=((e=(e-r)*i|0)|e<<8)&0xff00ff)|e<<4)&0xf0f0f0f)|e<<2)&0x33333333)|e<<1)&0x55555555)|(t=((t=((t=((t=((t=(t-n)*i|0)|t<<8)&0xff00ff)|t<<4)&0xf0f0f0f)|t<<2)&0x33333333)|t<<1)&0x55555555)<<1}function j(e,t,r,n,i,a,o,s){return(e!==o||t!==s)&&(i-o)*(t-s)>=(e-o)*(a-s)&&(e-o)*(n-s)>=(r-o)*(t-s)&&(r-o)*(a-s)>=(i-o)*(n-s)}function z(e,t,r){return(t.y-e.y)*(r.x-t.x)-(t.x-e.x)*(r.y-t.y)}function B(e,t){return e.x===t.x&&e.y===t.y}function R(e,t,r,n){let i=F(z(e,t,r)),a=F(z(e,t,n)),o=F(z(r,n,e)),s=F(z(r,n,t));return!!(i!==a&&o!==s||0===i&&L(e,r,t)||0===a&&L(e,n,t)||0===o&&L(r,e,n)||0===s&&L(r,t,n))}function L(e,t,r){return t.x<=Math.max(e.x,r.x)&&t.x>=Math.min(e.x,r.x)&&t.y<=Math.max(e.y,r.y)&&t.y>=Math.min(e.y,r.y)}function F(e){return e>0?1:e<0?-1:0}function V(e,t){return 0>z(e.prev,e,e.next)?z(e,t,e.next)>=0&&z(e,e.prev,t)>=0:0>z(e,t,e.prev)||0>z(e,e.next,t)}function $(e,t,r,n){let i=W(e,t,r);return n?(i.next=n.next,i.prev=n,n.next.prev=i,n.next=i):(i.prev=i,i.next=i),i}function N(e){e.next.prev=e.prev,e.prev.next=e.next,e.prevZ&&(e.prevZ.nextZ=e.nextZ),e.nextZ&&(e.nextZ.prevZ=e.prevZ)}function W(e,t,r){return{i:e,x:t,y:r,prev:null,next:null,z:0,prevZ:null,nextZ:null,steiner:!1}}let U=`
-precision mediump float;
-varying vec4 color;
-void main() {
- gl_FragColor = color;
-}`,Z=`
-uniform mat4 projectionViewModel;
-uniform float aspectRatio;
-
-uniform sampler2D colorTex;
-uniform float colorTexRes;
-uniform float colorTexEps;
-uniform float width;
-uniform float useOpacity;
-uniform float useColorOpacity;
-uniform int miter;
-
-attribute vec3 prevPosition;
-attribute vec3 currPosition;
-attribute vec3 nextPosition;
-attribute float opacity;
-attribute float offsetScale;
-attribute float colorIndex;
-
-varying vec4 color;
-
-void main() {
- vec2 aspectVec = vec2(aspectRatio, 1.0);
- vec4 prevProjected = projectionViewModel * vec4(prevPosition, 1.0);
- vec4 currProjected = projectionViewModel * vec4(currPosition, 1.0);
- vec4 nextProjected = projectionViewModel * vec4(nextPosition, 1.0);
-
- // get 2D screen space with W divide and aspect correction
- vec2 prevScreen = prevProjected.xy / prevProjected.w * aspectVec;
- vec2 currScreen = currProjected.xy / currProjected.w * aspectVec;
- vec2 nextScreen = nextProjected.xy / nextProjected.w * aspectVec;
-
- float len = width;
-
- // starting point uses (next - current)
- vec2 dir = vec2(0.0);
- if (currScreen == prevScreen) {
- dir = normalize(nextScreen - currScreen);
- }
- // ending point uses (current - previous)
- else if (currScreen == nextScreen) {
- dir = normalize(currScreen - prevScreen);
- }
- // somewhere in middle, needs a join
- else {
- // get directions from (C - B) and (B - A)
- vec2 dirA = normalize((currScreen - prevScreen));
- if (miter == 1) {
- vec2 dirB = normalize((nextScreen - currScreen));
- // now compute the miter join normal and length
- vec2 tangent = normalize(dirA + dirB);
- vec2 perp = vec2(-dirA.y, dirA.x);
- vec2 miter = vec2(-tangent.y, tangent.x);
- len = width / dot(miter, perp);
- dir = tangent;
- } else {
- dir = dirA;
- }
- }
-
- vec2 normal = vec2(-dir.y, dir.x) * len;
- normal.x /= aspectRatio;
- vec4 offset = vec4(normal * offsetScale, 0.0, 0.0);
- gl_Position = currProjected + offset;
-
- // Get color from texture
- float colorRowIndex = floor((colorIndex + colorTexEps) / colorTexRes);
- vec2 colorTexIndex = vec2(
- (colorIndex / colorTexRes) - colorRowIndex + colorTexEps,
- colorRowIndex / colorTexRes + colorTexEps
- );
-
- color = texture2D(colorTex, colorTexIndex);
- color.a = useColorOpacity * color.a + useOpacity * opacity;
-}`,G=new Float32Array([1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]),q=Float32Array.BYTES_PER_ELEMENT,H=e=>e.length>0&&Array.isArray(e[0]),{push:K,splice:Y}=Array.prototype;function Q(e,t=1,r=1){let n=[],i=Array(2*t);for(let a=0,o=e.length/t;a{let m,h,y,v,g,b,x,w,A,S,E,T,_,O,C,D,P,M;if(!e)return void console.error("Regl instance is undefined.");let I=new Float32Array(16),j=d?2:3,z=()=>+(l.length===h||null!==s),B=()=>{S=e.buffer(),E=e.buffer(),T=e.buffer(),C=e.buffer(),D={prevPosition:{buffer:()=>S,offset:0,stride:3*q},currPosition:{buffer:()=>S,offset:3*q*2,stride:3*q},nextPosition:{buffer:()=>S,offset:3*q*4,stride:3*q},opacity:{buffer:()=>E,offset:2*q,stride:q},offsetScale:{buffer:()=>T,offset:2*q,stride:q},colorIndex:{buffer:()=>C,offset:2*q,stride:q}},P=e.elements(),M=e({attributes:D,depth:{enable:!d},blend:{enable:!0,func:{srcRGB:"src alpha",srcAlpha:"one",dstRGB:"one minus src alpha",dstAlpha:"one minus src alpha"}},uniforms:{projectionViewModel:(e,t)=>{let r=e.projection||t.projection,n=e.model||t.model;return k(I,r,k(I,e.view||t.view,n))},aspectRatio:({viewportWidth:e,viewportHeight:t})=>e/t,colorTex:()=>_,colorTexRes:()=>O,colorTexEps:()=>.5/O,pixelRatio:({pixelRatio:e})=>e,width:({pixelRatio:e,viewportHeight:t})=>f/t*e,useOpacity:z,useColorOpacity:()=>Number(!z()),miter:Number(!!c)},elements:()=>P,vert:Z,frag:U})},R=()=>{L(),B()},L=()=>{i=[],v=[],g=new Float32Array,w=[],A=[],S.destroy(),T.destroy(),P.destroy()},F=(e,t)=>{let r=t.flat(2);return r.length===h?r:r.length===m?y.flatMap((e,t)=>Array(e).fill(r[t])):e},V=(e=[],{colorIndices:t=a,opacities:r=l,widths:n=u,is2d:o=d}={})=>{i=e,d=o,j=d?2:3,m=H(i)?i.length:1,h=(y=H(i)?i.map(e=>Math.floor(e.length/j)):[Math.floor(i.length/j)]).reduce((e,t)=>e+t,0),a=F(a,t),l=F(l,r),u=F(u,n),i&&h>1?(()=>{1===m&&i.length%j>0&&console.warn(`The length of points (${h}) does not match the dimensions (${j}). 
Incomplete points are ignored.`),v=i.flat().slice(0,h*j),d&&(v=function(e,t,r,n=0){let i=[],a=[,,,].fill(n);for(let t=0,r=e.length/2;t{let r=0;for(let n of e){for(let e=0;e65536?"uint32":"uint16",data:A})})():R()},$=()=>{let t=H(o)?o:[o],r=new Uint8Array((O=Math.max(2,Math.ceil(Math.sqrt(t.length))))**2*4);t.forEach((e,t)=>{r[4*t]=Math.min(255,Math.max(0,Math.round(255*e[0]))),r[4*t+1]=Math.min(255,Math.max(0,Math.round(255*e[1]))),r[4*t+2]=Math.min(255,Math.max(0,Math.round(255*e[2]))),r[4*t+3]=Number.isNaN(+e[3])?255:Math.min(255,Math.max(0,Math.round(255*e[3])))}),_=e.texture({data:r,shape:[O,O,4]})},N=(e,t=s)=>{o=e,s=t,_&&_.destroy(),$()};return B(),$(),i&&i.length>1&&V(i),{clear:R,destroy:L,draw:({projection:e,model:a,view:o}={})=>{e&&(t=e),a&&(r=a),o&&(n=o),i&&i.length>1&&M({projection:t,model:r,view:n})},getPoints:()=>i,setPoints:V,getData:()=>({points:g,widths:w,opacities:x,colorIndices:b}),getBuffer:()=>({points:S,widths:T,opacities:E,colorIndices:C}),getStyle:()=>({color:o,miter:c,width:f}),setStyle:({color:e,opacity:t,miter:r,width:n}={})=>{e&&N(e,t||s),r&&(c=!!r),void 0!==n&&Number.isFinite(n)&&(f=n)}}};var ee=()=>{let e=[Int8Array,Uint8Array,Uint8ClampedArray,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array];class t{static from(r){if(!(r instanceof ArrayBuffer))throw Error("Data must be an instance of ArrayBuffer.");let[n,i]=new Uint8Array(r,0,2);if(219!==n)throw Error("Data does not appear to be in a KDBush format.");let a=i>>4;if(1!==a)throw Error(`Got v${a} data when expected v1.`);let o=e[15&i];if(!o)throw Error("Unrecognized array type.");let[s]=new Uint16Array(r,2,1),[l]=new Uint32Array(r,4,1);return new t(l,s,o,r)}constructor(t,r=64,n=Float64Array,i){if(isNaN(t)||t<0)throw Error(`Unexpected numItems value: ${t}.`);this.numItems=+t,this.nodeSize=Math.min(Math.max(+r,2),65535),this.ArrayType=n,this.IndexArrayType=t<65536?Uint16Array:Uint32Array;const 
a=e.indexOf(this.ArrayType),o=2*t*this.ArrayType.BYTES_PER_ELEMENT,s=t*this.IndexArrayType.BYTES_PER_ELEMENT,l=(8-s%8)%8;if(a<0)throw Error(`Unexpected typed array class: ${n}.`);i&&i instanceof ArrayBuffer?(this.data=i,this.ids=new this.IndexArrayType(this.data,8,t),this.coords=new this.ArrayType(this.data,8+s+l,2*t),this._pos=2*t,this._finished=!0):(this.data=new ArrayBuffer(8+o+s+l),this.ids=new this.IndexArrayType(this.data,8,t),this.coords=new this.ArrayType(this.data,8+s+l,2*t),this._pos=0,this._finished=!1,new Uint8Array(this.data,0,2).set([219,16+a]),new Uint16Array(this.data,2,1)[0]=r,new Uint32Array(this.data,4,1)[0]=t)}add(e,t){let r=this._pos>>1;return this.ids[r]=r,this.coords[this._pos++]=e,this.coords[this._pos++]=t,r}finish(){let e=this._pos>>1;if(e!==this.numItems)throw Error(`Added ${e} items when expected ${this.numItems}.`);return function e(t,n,i,a,o,s){if(o-a<=i)return;let l=a+o>>1;(function e(t,n,i,a,o,s){for(;o>a;){if(o-a>600){let r=o-a+1,l=i-a+1,f=Math.log(r),u=.5*Math.exp(2*f/3),c=.5*Math.sqrt(f*u*(r-u)/r)*(l-r/2<0?-1:1),d=Math.max(a,Math.floor(i-l*u/r+c)),p=Math.min(o,Math.floor(i+(r-l)*u/r+c));e(t,n,i,d,p,s)}let l=n[2*i+s],f=a,u=o;for(r(t,n,a,i),n[2*o+s]>l&&r(t,n,a,o);fl;)u--}n[2*a+s]===l?r(t,n,a,u):r(t,n,++u,o),u<=i&&(a=u+1),i<=u&&(o=u-1)}})(t,n,l,a,o,s),e(t,n,i,a,l-1,1-s),e(t,n,i,l+1,o,1-s)}(this.ids,this.coords,this.nodeSize,0,this.numItems-1,0),this._finished=!0,this}range(e,t,r,n){if(!this._finished)throw Error("Data not yet indexed - call index.finish().");let{ids:i,coords:a,nodeSize:o}=this,s=[0,i.length-1,0],l=[];for(;s.length;){let f=s.pop()||0,u=s.pop()||0,c=s.pop()||0;if(u-c<=o){for(let o=c;o<=u;o++){let s=a[2*o],f=a[2*o+1];s>=e&&s<=r&&f>=t&&f<=n&&l.push(i[o])}continue}let d=c+u>>1,p=a[2*d],m=a[2*d+1];p>=e&&p<=r&&m>=t&&m<=n&&l.push(i[d]),(0===f?e<=p:t<=m)&&(s.push(c),s.push(d-1),s.push(1-f)),(0===f?r>=p:n>=m)&&(s.push(d+1),s.push(u),s.push(1-f))}return l}within(e,t,r){if(!this._finished)throw Error("Data not yet indexed - call 
index.finish().");let{ids:n,coords:a,nodeSize:o}=this,s=[0,n.length-1,0],l=[],f=r*r;for(;s.length;){let u=s.pop()||0,c=s.pop()||0,d=s.pop()||0;if(c-d<=o){for(let r=d;r<=c;r++)i(a[2*r],a[2*r+1],e,t)<=f&&l.push(n[r]);continue}let p=d+c>>1,m=a[2*p],h=a[2*p+1];i(m,h,e,t)<=f&&l.push(n[p]),(0===u?e-r<=m:t-r<=h)&&(s.push(d),s.push(p-1),s.push(1-u)),(0===u?e+r>=m:t+r>=h)&&(s.push(p+1),s.push(c),s.push(1-u))}return l}}function r(e,t,r,i){n(e,r,i),n(t,2*r,2*i),n(t,2*r+1,2*i+1)}function n(e,t,r){let n=e[t];e[t]=e[r],e[r]=n}function i(e,t,r,n){let i=e-r,a=t-n;return i*i+a*a}return t},et=()=>{addEventListener("message",e=>{let t=e.data.points;0===t.length&&self.postMessage({error:Error("Invalid point data")});let r=new KDBush(t.length,e.data.nodeSize);for(let[e,n]of t)r.add(e,n);r.finish(),postMessage(r.data,[r.data])})};let er=ee(),en=(e,t={nodeSize:16,useWorker:void 0})=>new Promise((r,n)=>{if(e instanceof ArrayBuffer)r(er.from(e));else if((e.length<1e6||!1===t.useWorker)&&!0!==t.useWorker){let n=new er(e.length,t.nodeSize);for(let t of e)n.add(t[0],t[1]);n.finish(),r(n)}else{let i,a,o,s,l,f=(i=ee.toString(),a=et.toString(),o=new Blob([`const createKDBushClass = ${i};KDBush = createKDBushClass();const createWorker = ${a};createWorker();`],{type:"text/javascript"}),l=new 
Worker(s=URL.createObjectURL(o),{name:"KDBush"}),URL.revokeObjectURL(s),l);f.onmessage=e=>{e.data.error?n(e.data.error):r(er.from(e.data)),f.terminate()},f.postMessage({points:e,nodeSize:t.nodeSize})}}),ei=!0,ea=8,eo=2,es="freeform",el=24,ef="auto",eu=0,ec=Float32Array.BYTES_PER_ELEMENT,ed=["OES_texture_float","OES_element_index_uint","WEBGL_color_buffer_float","EXT_float_blend"],ep={color:[0,0,0,0],depth:1},em="panZoom",eh="lasso",ey="rotate",ev=[em,eh,ey],eg={cubicIn:e=>e*e*e,cubicInOut:m,cubicOut:e=>--e*e*e+1,linear:e=>e,quadIn:e=>e*e,quadInOut:e=>e<.5?2*e*e:-1+(4-2*e)*e,quadOut:e=>e*(2-e)},eb=m,ex="continuous",ew="categorical",eA=[ex,ew],eS="deselect",eE="lassoEnd",eT=[eS,eE],ek=[0,.666666667,1,1],e_=750,eO=500,eC=100,eD=250,eP="lasso",eM="rotate",eI="merge",ej="remove",ez=[eP,eM,eI,ej],eB="ctrl",eR="meta",eL="shift",eF=["alt","cmd",eB,eR,eL],eV={[ej]:"alt",[eM]:"alt",[eP]:eL,[eI]:"cmd"},e$=[.66,.66,.66,1],eN=[0,.55,1,1],eW=[1,1,1,1],eU=[0,0,0,1],eZ=[.66,.66,.66,.2],eG=[0,.55,1,1],eq=[1,1,1,1],eH=[1,1,1,.5],eK=[0,0],eY=new Float32Array([1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]),eQ=[1,1,1,.5],eX=new Set(["z","valueZ","valueA","value1","category"]),eJ=new Set(["w","valueW","valueB","value2","value"]),e0=15e3,e1,e2=Symbol("SKIP_DEPRECATION_VALUE_TRANSLATION"),e3="Points have not been drawn",e5="The instance was already destroyed",e6=(e,t=null)=>null===e?t:e,e4=()=>{if(!t){let e=document.createElement("style");document.head.appendChild(e),t=e.sheet}return t},e8=e=>{let t=e4(),r=t.rules.length;return t.insertRule(e,r),r},e7=e=>{e4().deleteRule(e)},e9=null,te=null,tt=(e,{onDraw:t=h,onStart:r=h,onEnd:n=h,enableInitiator:i=ei,initiatorParentElement:a=document.body,longPressIndicatorParentElement:o=document.body,minDelay:s=ea,minDist:l=eo,pointNorm:f=h,type:u=es,brushSize:c=el}={})=>{let 
d,p,m,y,v,A,S,E=i,T=a,k=o,_=t,O=r,C=n,D=s,P=l,M=f,I=u,j=c,z=document.createElement("div"),B=Math.random().toString(36).substring(2,5)+Math.random().toString(36).substring(2,5);z.id=`lasso-initiator-${B}`,z.style.position="fixed",z.style.display="flex",z.style.justifyContent="center",z.style.alignItems="center",z.style.zIndex=99,z.style.width="4rem",z.style.height="4rem",z.style.borderRadius="4rem",z.style.opacity=.5,z.style.transform="translate(-50%,-50%) scale(0) rotate(0deg)";let{longPress:R,longPressCircle:L,longPressCircleLeft:F,longPressCircleRight:V,longPressEffect:$}=(d=document.createElement("div"),p=Math.random().toString(36).substring(2,5)+Math.random().toString(36).substring(2,5),d.id=`lasso-long-press-${p}`,d.style.position="fixed",d.style.width="1.25rem",d.style.height="1.25rem",d.style.pointerEvents="none",d.style.transform="translate(-50%,-50%)",(m=document.createElement("div")).style.position="absolute",m.style.top=0,m.style.left=0,m.style.width="1.25rem",m.style.height="1.25rem",m.style.clipPath="inset(0px 0px 0px 50%)",m.style.opacity=0,d.appendChild(m),(y=document.createElement("div")).style.position="absolute",y.style.top=0,y.style.left=0,y.style.width="0.8rem",y.style.height="0.8rem",y.style.border="0.2rem solid currentcolor",y.style.borderRadius="0.8rem",y.style.clipPath="inset(0px 50% 0px 0px)",y.style.transform="rotate(0deg)",m.appendChild(y),(v=document.createElement("div")).style.position="absolute",v.style.top=0,v.style.left=0,v.style.width="0.8rem",v.style.height="0.8rem",v.style.border="0.2rem solid currentcolor",v.style.borderRadius="0.8rem",v.style.clipPath="inset(0px 50% 0px 
0px)",v.style.transform="rotate(0deg)",m.appendChild(v),(A=document.createElement("div")).style.position="absolute",A.style.top=0,A.style.left=0,A.style.width="1.25rem",A.style.height="1.25rem",A.style.borderRadius="1.25rem",A.style.background="currentcolor",A.style.transform="scale(0)",A.style.opacity=0,d.appendChild(A),{longPress:d,longPressCircle:m,longPressCircleLeft:y,longPressCircleRight:v,longPressEffect:A}),N=!1,W=!1,U=[],Z=[],G=[],q=[],H=!1,K=null,Y=null,Q=null,X=null,J=null,ee=null,et=null,er=null,en=null,ef=null,eu=()=>{N=!1};window.addEventListener("mouseup",eu);let ec=()=>{z.style.opacity=.5,z.style.transform="translate(-50%,-50%) scale(0) rotate(0deg)"},ed=(e,t)=>{let r=getComputedStyle(e),n=+r.opacity,i=r.transform.match(/([0-9.-]+)+/g),a=+i[0],o=+i[1],s=Math.sqrt(a*a+o*o),l=180/Math.PI*Math.atan2(o,a);return{opacity:n,scale:s,rotate:l=t&&l<=0?360+l:l}},ep=e=>{if(!E||N)return;let t=e.clientX,r=e.clientY;z.style.top=`${r}px`,z.style.left=`${t}px`;let n=ed(z),i=n.opacity,a=n.scale,o=n.rotate;z.style.opacity=i,z.style.transform=`translate(-50%,-50%) scale(${a}) rotate(${o}deg)`,z.style.animation="none",x().then(()=>{null!==e9&&e7(e9),e9=e8(`
-@keyframes scaleInFadeOut {
- 0% {
- opacity: ${i};
- transform: translate(-50%,-50%) scale(${a}) rotate(${o}deg);
- }
- 10% {
- opacity: 1;
- transform: translate(-50%,-50%) scale(1) rotate(${o+20}deg);
- }
- 100% {
- opacity: 0;
- transform: translate(-50%,-50%) scale(0.9) rotate(${o+60}deg);
- }
-}
-`),z.style.animation="2500ms ease scaleInFadeOut 0s 1 normal backwards",x().then(()=>{ec()})})},em=()=>{let{opacity:e,scale:t,rotate:r}=ed(z);z.style.opacity=e,z.style.transform=`translate(-50%,-50%) scale(${t}) rotate(${r}deg)`,z.style.animation="none",x(2).then(()=>{null!==te&&e7(te),te=e8(`
-@keyframes fadeScaleOut {
- 0% {
- opacity: ${e};
- transform: translate(-50%,-50%) scale(${t}) rotate(${r}deg);
- }
- 100% {
- opacity: 0;
- transform: translate(-50%,-50%) scale(0) rotate(${r}deg);
- }
-}
-`),z.style.animation="250ms ease fadeScaleOut 0s 1 normal backwards",x().then(()=>{ec()})})},eh=(e,t,{time:r=e_,extraTime:n=eO,delay:i=eC}={time:e_,extraTime:eO,delay:eC})=>{H=!0;let a=getComputedStyle(R);R.style.color=a.color,R.style.top=`${t}px`,R.style.left=`${e}px`,R.style.animation="none";let o=getComputedStyle(L);L.style.clipPath=o.clipPath,L.style.opacity=o.opacity,L.style.animation="none";let s=ed($);$.style.opacity=s.opacity,$.style.transform=`scale(${s.scale})`,$.style.animation="none";let l=ed(F);F.style.transform=`rotate(${l.rotate}deg)`,F.style.animation="none";let f=ed(V);V.style.transform=`rotate(${f.rotate}deg)`,V.style.animation="none",x().then(()=>{if(!H)return;null!==J&&e7(J),null!==X&&e7(X),null!==Q&&e7(Q),null!==Y&&e7(Y),null!==K&&e7(K);let{rules:e,names:t}=(({time:e=e_,extraTime:t=eO,delay:r=eC,currentColor:n,targetColor:i,effectOpacity:a,effectScale:o,circleLeftRotation:s,circleRightRotation:l,circleClipPath:f,circleOpacity:u})=>{let c,d=s/360,p=(1-d)*e+t,m=Math.round((1-d)*e/p*100),h=Math.round(m/2);return{rules:{main:`
- @keyframes mainIn {
- 0% {
- color: ${n};
- opacity: 0;
- }
- 0%, ${m}% {
- color: ${n};
- opacity: 1;
- }
- 100% {
- color: ${i};
- opacity: 0.8;
- }
- }
-`,effect:(c=m+(100-m)/4,`
- @keyframes effectIn {
- 0%, ${m}% {
- opacity: ${a};
- transform: scale(${o});
- }
- ${c}% {
- opacity: 0.66;
- transform: scale(1.5);
- }
- 99% {
- opacity: 0;
- transform: scale(2);
- }
- 100% {
- opacity: 0;
- transform: scale(0);
- }
- }
-`),circleRight:`
- @keyframes rightSpinIn {
- 0% {
- transform: rotate(${l}deg);
- }
- ${h}%, 100% {
- transform: rotate(180deg);
- }
- }
-`,circleLeft:`
- @keyframes leftSpinIn {
- 0% {
- transform: rotate(${s}deg);
- }
- ${m}%, 100% {
- transform: rotate(360deg);
- }
- }
-`,circle:`
- @keyframes circleIn {
- 0% {
- clip-path: ${f};
- opacity: ${u};
- }
- ${h}% {
- clip-path: ${f};
- opacity: 1;
- }
- ${h+.01}%, 100% {
- clip-path: inset(0);
- opacity: 1;
- }
- }
-`},names:{main:`${p}ms ease-out mainIn ${r}ms 1 normal forwards`,effect:`${p}ms ease-out effectIn ${r}ms 1 normal forwards`,circleLeft:`${p}ms linear leftSpinIn ${r}ms 1 normal forwards`,circleRight:`${p}ms linear rightSpinIn ${r}ms 1 normal forwards`,circle:`${p}ms linear circleIn ${r}ms 1 normal forwards`}}})({time:r,extraTime:n,delay:i,currentColor:a.color||"currentcolor",targetColor:R.dataset.activeColor,effectOpacity:s.opacity||0,effectScale:s.scale||0,circleLeftRotation:l.rotate||0,circleRightRotation:f.rotate||0,circleClipPath:o.clipPath||"inset(0 0 0 50%)",circleOpacity:o.opacity||0});K=e8(e.main),Y=e8(e.effect),Q=e8(e.circleLeft),X=e8(e.circleRight),J=e8(e.circle),R.style.animation=t.main,$.style.animation=t.effect,F.style.animation=t.circleLeft,V.style.animation=t.circleRight,L.style.animation=t.circle})},ey=({time:e=eD}={time:eD})=>{if(!H)return;H=!1;let t=getComputedStyle(R);R.style.color=t.color,R.style.animation="none";let r=getComputedStyle(L);L.style.clipPath=r.clipPath,L.style.opacity=r.opacity,L.style.animation="none";let n=ed($);$.style.opacity=n.opacity,$.style.transform=`scale(${n.scale})`,$.style.animation="none";let i=ed(F,"x"===r.clipPath.slice(-2,-1));F.style.transform=`rotate(${i.rotate}deg)`,F.style.animation="none";let a=ed(V);V.style.transform=`rotate(${a.rotate}deg)`,V.style.animation="none",x().then(()=>{null!==ef&&e7(ef),null!==en&&e7(en),null!==er&&e7(er),null!==et&&e7(et),null!==ee&&e7(ee);let{rules:o,names:s}=(({time:e=eD,currentColor:t,targetColor:r,effectOpacity:n,effectScale:i,circleLeftRotation:a,circleRightRotation:o,circleClipPath:s,circleOpacity:l})=>{let f=a/360,u=f*e,c=Math.min(100,100*f),d=c>50?Math.round((1-50/c)*100):0;return{rules:{main:`
- @keyframes mainOut {
- 0% {
- color: ${t};
- }
- 100% {
- color: ${r};
- }
- }
-`,effect:`
- @keyframes effectOut {
- 0% {
- opacity: ${n};
- transform: scale(${i});
- }
- 99% {
- opacity: 0;
- transform: scale(${i+.5});
- }
- 100% {
- opacity: 0;
- transform: scale(0);
- }
- }
-`,circleRight:`
- @keyframes rightSpinOut {
- 0%, ${d}% {
- transform: rotate(${o}deg);
- }
- 100% {
- transform: rotate(0deg);
- }
-`,circleLeft:`
- @keyframes leftSpinOut {
- 0% {
- transform: rotate(${a}deg);
- }
- 100% {
- transform: rotate(0deg);
- }
- }
-`,circle:`
- @keyframes circleOut {
- 0%, ${d}% {
- clip-path: ${s};
- opacity: ${l};
- }
- ${d+.01}% {
- clip-path: inset(0 0 0 50%);
- opacity: ${l};
- }
- 100% {
- clip-path: inset(0 0 0 50%);
- opacity: 0;
- }
- }
-`},names:{main:`${u}ms linear mainOut 0s 1 normal forwards`,effect:`${u}ms linear effectOut 0s 1 normal forwards`,circleRight:`${u}ms linear leftSpinOut 0s 1 normal forwards`,circleLeft:`${u}ms linear rightSpinOut 0s 1 normal forwards`,circle:`${u}ms linear circleOut 0s 1 normal forwards`}}})({time:e,currentColor:t.color||"currentcolor",targetColor:R.dataset.color,effectOpacity:n.opacity||0,effectScale:n.scale||0,circleLeftRotation:i.rotate||0,circleRightRotation:a.rotate||0,circleClipPath:r.clipPath||"inset(0px)",circleOpacity:r.opacity||1});ee=e8(o.main),et=e8(o.effect),er=e8(o.circleLeft),en=e8(o.circleRight),ef=e8(o.circle),R.style.animation=s.main,$.style.animation=s.effect,F.style.animation=s.circleLeft,V.style.animation=s.circleRight,L.style.animation=s.circle})},ev=()=>{_(U,Z)},eg=e=>{U.push(e),Z.push(e[0],e[1])},eb=e=>{let[t,r]=e,[n,i]=U[0];U[1]=[t,i],U[2]=[t,r],U[3]=[n,r],U[4]=[n,i],Z[2]=t,Z[3]=i,Z[4]=t,Z[5]=r,Z[6]=n,Z[7]=r,Z[8]=n,Z[9]=i},ex=e=>{G.push(e)},ew=(e,t,r)=>{let[n,i]=e,[a,o]=t,s=n-a,l=i-o,f=Math.sqrt([s,l].reduce((e,t)=>e+t**2,0));return[l/f*r,-s/f*r]},eA=e=>{let t=G.at(-1),r=Math.abs(M([0,0])[0]-M([j/2,0])[0]),[n,i]=ew(e,t,r),a=G.length;if(1===a){let e=[t[0]+n,t[1]+i],r=[t[0]-n,t[1]-i];U.push(e,r),Z.push(e[0],e[1],r[0],r[1]),q.push([n,i])}else{[n,i]=ew(e,t,r);let o=[...q,[n,i]];[n,i]=((e,t,r)=>{if(0===e.length)return 0;if(1===e.length)return e[0];let n=2**(-1/t),i=Math.max(0,e.length-r),a=e.slice(i),o=0,s=0,l=0;for(let e=a.length-1;e>=0;e--){let t=a.length-1-e,r=n**t;o+=a[e][0]*r,s+=a[e][1]*r,l+=r}return[o/l,s/l]})(o,1,10);let[s,l]=q.at(-1),f=(n+s)/2,u=(i+l)/2,c=[t[0]+f,t[1]+u],d=[t[0]-f,t[1]-u];U.splice(a-1,2,c,d),Z.splice(2*(a-1),4,c[0],c[1],d[0],d[1]),q.splice(a,1,[f,u])}let o=[e[0]+n,e[1]+i],s=[e[0]-n,e[1]-i];U.splice(a,0,o,s),Z.splice(2*a,0,o[0],o[1],s[0],s[1]),G.push(e),q.push([n,i])},eS=eg,eE=eg,eT=e=>{if(S){let t,r;t=e[0],r=e[1],Math.sqrt((t-S[0])**2+(r-S[1])**2)>P&&(S=e,eS(M(e)),U.length>1&&ev())}else{W||(W=!0,O()),S=e;let 
t=M(e);eE(t)}},ek=w(eT,D,D),eP=(t,r)=>{let n=(t=>{let{left:r,top:n}=e.getBoundingClientRect();return[t.clientX-r,t.clientY-n]})(t);return r?ek(n):eT(n)},eM=()=>{U=[],Z=[],G=[],q=[],S=void 0,ev()},eI=e=>{ep(e)},ej=()=>{N=!0,W=!0,eM(),O()},ez=()=>{em()},eB=({merge:e=!1,remove:t=!1}={})=>{W=!1;let r=[...U],n=[...Z];return ek.cancel(),eM(),r.length>0&&C(r,n,{merge:e,remove:t}),r},eR=e=>"onDraw"===e?_:"onStart"===e?O:"onEnd"===e?C:"enableInitiator"===e?E:"minDelay"===e?D:"minDist"===e?P:"pointNorm"===e?M:"type"===e?I:"brushSize"===e?j:void 0,eL=({onDraw:e=null,onStart:t=null,onEnd:r=null,enableInitiator:n=null,initiatorParentElement:i=null,longPressIndicatorParentElement:a=null,minDelay:o=null,minDist:s=null,pointNorm:l=null,type:f=null,brushSize:u=null}={})=>{_=e6(e,_),O=e6(t,O),C=e6(r,C),E=e6(n,E),D=e6(o,D),P=e6(s,P),M=e6(l,M),j=e6(u,j),null!==i&&i!==T&&(T.removeChild(z),i.appendChild(z),T=i),null!==a&&a!==k&&(k.removeChild(R),a.appendChild(R),k=a),E?(z.addEventListener("click",eI),z.addEventListener("mousedown",ej),z.addEventListener("mouseleave",ez)):(z.removeEventListener("mousedown",ej),z.removeEventListener("mouseleave",ez)),null!==f&&(e=>{switch(e){case"rectangle":I=e,eS=eb,eE=eg;break;case"brush":I=e,eS=eA,eE=ex;break;default:I="freeform",eS=eg,eE=eg}})(f)},eF=()=>{T.removeChild(z),k.removeChild(R),window.removeEventListener("mouseup",eu),z.removeEventListener("click",eI),z.removeEventListener("mousedown",ej),z.removeEventListener("mouseleave",ez)};return T.appendChild(z),k.appendChild(R),eL({onDraw:_,onStart:O,onEnd:C,enableInitiator:E,initiatorParentElement:T,type:I,brushSize:j}),((...e)=>t=>e.reduce((e,t)=>t(e),t))(b("initiator",z),b("longPressIndicator",R),e=>g(e,{clear:eM,destroy:eF,end:eB,extend:eP,get:eR,set:eL,showInitiator:ep,hideInitiator:em,showLongPressIndicator:eh,hideLongPressIndicator:ey}),e=>g({__proto__:{constructor:tt}},e))({})},tr=(e,t)=>!!e&&ed.reduce((r,n)=>e.hasExtension(n)?r:(t||console.warn(`WebGL: ${n} extension not supported. 
Scatterplot might not render properly`),!1),!0),tn=e=>{let t=e.getContext("webgl",{antialias:!0,preserveDrawingBuffer:!0}),r=[];for(let e of ed)t.getExtension(e)?r.push(e):console.warn(`WebGL: ${e} extension not supported. Scatterplot might not render properly`);return(0,p.default)({gl:t,extensions:r})},ti=(e,t,r,n)=>Math.sqrt((e-r)**2+(t-n)**2),ta=/^#?([a-f\d])([a-f\d])([a-f\d])$/i,to=(e,t,{minLength:r=0}={})=>Array.isArray(e)&&e.length>=r&&e.every(t),ts=e=>!Number.isNaN(+e)&&+e>=0,tl=e=>!Number.isNaN(+e)&&+e>0,tf=(e,t)=>r=>e.indexOf(r)>=0?r:t,tu=(e,t,r=e0)=>new Promise((n,i)=>{((e,t=!1,r=e0)=>new Promise((n,i)=>{let a=new Image;t&&(a.crossOrigin="anonymous"),a.src=e,a.onload=()=>{n(a)};let o=()=>{i(Error("IMAGE_LOAD_ERROR"))};a.onerror=o,setTimeout(o,r)}))(t,0!==t.indexOf(window.location.origin)&&-1===t.indexOf("base64"),r).then(t=>{n(e.texture(t))}).catch(e=>{i(e)})}),tc=/(^#[0-9A-F]{6}$)|(^#[0-9A-F]{3}$)/i,td=e=>e>=0&&e<=1,tp=e=>Array.isArray(e)&&e.every(td),tm=(e,[t,r]=[])=>{let n=0;for(let i=0,a=e.length-2;ir&&(l-o)*(r-s)-(t-o)*(f-s)>0&&n++:f<=r&&(l-o)*(r-s)-(t-o)*(f-s)<0&&n--,a=i}return 0!==n},th=e=>"string"==typeof e||e instanceof String,ty=e=>Number.isInteger(e)&&e>=0&&e<=255,tv=e=>Array.isArray(e)&&e.every(ty),tg=e=>Array.isArray(e)&&e.length>0&&(Array.isArray(e[0])||th(e[0])),tb=(e,t)=>e>t?e:t,tx=(e,t)=>e{if(4===e.length&&(tp(e)||tv(e))){let r=tp(e);return t&&r||!(t||r)?e:t&&!r?e.map(e=>e/255):e.map(e=>255*e)}if(3===e.length&&(tp(e)||tv(e))){let r=255**!t,n=tp(e);return t&&n||!(t||n)?[...e,r]:t&&!n?[...e.map(e=>e/255),r]:[...e.map(e=>255*e),r]}return tc.test(e)?((e,t=!1)=>[...((e,t=!1)=>e.replace(ta,(e,t,r,n)=>`#${t}${t}${r}${r}${n}${n}`).substring(1).match(/.{2}/g).map(e=>Number.parseInt(e,16)/255**t))(e,t),255**!t])(e,t):(console.warn("Only HEX, RGB, and RGBA are handled by this function. 
Returning white instead."),t?[1,1,1,1]:[255,255,255,255])},tA=e=>.21*e[0]+.72*e[1]+.07*e[2],tS=e=>new Promise((t,r)=>{if(!e||Array.isArray(e))t(e);else{let n,i,a=Array.isArray(e.x)||ArrayBuffer.isView(e.x)?e.x.length:0,o=(Array.isArray(e.x)||ArrayBuffer.isView(e.x))&&(t=>e.x[t]),s=(Array.isArray(e.y)||ArrayBuffer.isView(e.y))&&(t=>e.y[t]),l=(Array.isArray(e.line)||ArrayBuffer.isView(e.line))&&(t=>e.line[t]),f=(Array.isArray(e.lineOrder)||ArrayBuffer.isView(e.lineOrder))&&(t=>e.lineOrder[t]),u=Object.keys(e),c=(n=u.find(e=>eX.has(e)))&&(Array.isArray(e[n])||ArrayBuffer.isView(e[n]))&&(t=>e[n][t]),d=(i=u.find(e=>eJ.has(e)))&&(Array.isArray(e[i])||ArrayBuffer.isView(e[i]))&&(t=>e[i][t]);o&&s&&c&&d&&l&&f?t(e.x.map((e,t)=>[e,s(t),c(t),d(t),l(t),f(t)])):o&&s&&c&&d&&l?t(Array.from({length:a},(e,t)=>[o(t),s(t),c(t),d(t),l(t)])):o&&s&&c&&d?t(Array.from({length:a},(e,t)=>[o(t),s(t),c(t),d(t)])):o&&s&&c?t(Array.from({length:a},(e,t)=>[o(t),s(t),c(t)])):o&&s?t(Array.from({length:a},(e,t)=>[o(t),s(t)])):r(Error("You need to specify at least x and y"))}}),tE=e=>Number.isFinite(e.y)&&!("x"in e),tT=e=>Number.isFinite(e.x)&&!("y"in e),tk=e=>Number.isFinite(e.x)&&Number.isFinite(e.y)&&Number.isFinite(e.width)&&Number.isFinite(e.height),t_=e=>Number.isFinite(e.x1)&&Number.isFinite(e.y1)&&Number.isFinite(e.x2)&&Number.isFinite(e.x2),tO=e=>"vertices"in e&&e.vertices.length>1,tC=(e={})=>{let{regl:t,canvas:r=document.createElement("canvas"),gamma:n=1}=e,i=!1;t||(t=tn(r));let a=tr(t),o=[r.width,r.height],s=t.framebuffer({width:o[0],height:o[1],colorFormat:"rgba",colorType:"float"}),l=t({vert:`
- precision highp float;
- attribute vec2 xy;
- void main () {
- gl_Position = vec4(xy, 0, 1);
- }`,frag:`
- precision highp float;
- uniform vec2 srcRes;
- uniform sampler2D src;
- uniform float gamma;
-
- vec3 approxLinearToSRGB (vec3 rgb, float gamma) {
- return pow(clamp(rgb, vec3(0), vec3(1)), vec3(1.0 / gamma));
- }
-
- void main () {
- vec4 color = texture2D(src, gl_FragCoord.xy / srcRes);
- gl_FragColor = vec4(approxLinearToSRGB(color.rgb, gamma), color.a);
- }`,attributes:{xy:[-4,-4,4,-4,0,4]},uniforms:{src:()=>s,srcRes:()=>o,gamma:()=>n},count:3,depth:{enable:!1},blend:{enable:!0,func:{srcRGB:"one",srcAlpha:"one",dstRGB:"one minus src alpha",dstAlpha:"one minus src alpha"}}}),f=new Set,u=t.frame(()=>{let e=f.values(),t=e.next();for(;!t.done;)t.value(),t=e.next()}),c=(e,t)=>{let n=void 0===e?Math.min(window.innerWidth,window.screen.availWidth):e,i=void 0===t?Math.min(window.innerHeight,window.screen.availHeight):t;r.width=n*window.devicePixelRatio,r.height=i*window.devicePixelRatio,o[0]=r.width,o[1]=r.height,s.resize(...o)},d=()=>{c()};return e.canvas||(window.addEventListener("resize",d),window.addEventListener("orientationchange",d),c()),{get canvas(){return r},get regl(){return t},get gamma(){return n},set gamma(newGamma){n=+newGamma},get isSupported(){return a},get isDestroyed(){return i},render:(e,n)=>{let i;t.clear(ep),s.use(()=>{t.clear(ep),e()}),l(),(i=n.getContext("2d")).clearRect(0,0,n.width,n.height),i.drawImage(r,(r.width-n.width)/2,(r.height-n.height)/2,n.width,n.height,0,0,n.width,n.height)},resize:c,onFrame:e=>(f.add(e),()=>{f.delete(e)}),refresh:()=>{t.poll()},destroy:()=>{i=!0,window.removeEventListener("resize",d),window.removeEventListener("orientationchange",d),u.cancel(),r=void 0,t.destroy(),t=void 0}}},tD=`
-precision mediump float;
-
-uniform sampler2D texture;
-
-varying vec2 uv;
-
-void main () {
- gl_FragColor = texture2D(texture, uv);
-}
-`,tP=`
-precision mediump float;
-
-uniform mat4 modelViewProjection;
-
-attribute vec2 position;
-
-varying vec2 uv;
-
-void main () {
- uv = position;
- gl_Position = modelViewProjection * vec4(-1.0 + 2.0 * uv.x, 1.0 - 2.0 * uv.y, 0, 1);
-}
-`,tM=`precision highp float;
-
-varying vec4 color;
-
-void main() {
- gl_FragColor = color;
-}
-`,tI=`precision highp float;
-
-uniform sampler2D startStateTex;
-uniform sampler2D endStateTex;
-uniform float t;
-
-varying vec2 particleTextureIndex;
-
-void main() {
- // Interpolate x, y, and value
- vec3 start = texture2D(startStateTex, particleTextureIndex).xyw;
- vec3 end = texture2D(endStateTex, particleTextureIndex).xyw;
- vec3 curr = start * (1.0 - t) + end * t;
-
- // The category cannot be interpolated
- float endCategory = texture2D(endStateTex, particleTextureIndex).z;
-
- gl_FragColor = vec4(curr.xy, endCategory, curr.z);
-}`,tj=`precision highp float;
-
-attribute vec2 position;
-varying vec2 particleTextureIndex;
-
-void main() {
- // map normalized device coords to texture coords
- particleTextureIndex = 0.5 * (1.0 + position);
-
- gl_Position = vec4(position, 0, 1);
-}`,tz=`
-precision highp float;
-
-uniform float antiAliasing;
-
-varying vec4 color;
-varying float finalPointSize;
-
-float linearstep(float edge0, float edge1, float x) {
- return clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0);
-}
-
-void main() {
- vec2 c = gl_PointCoord * 2.0 - 1.0;
- float sdf = length(c) * finalPointSize;
- float alpha = linearstep(finalPointSize + antiAliasing, finalPointSize - antiAliasing, sdf);
-
- gl_FragColor = vec4(color.rgb, alpha * color.a);
-}
-`,tB=function(){let e=(e,t,r,n,i)=>{let a=(n-t)*.5,o=(i-r)*.5;return(2*r-2*n+a+o)*e*e*e+(-3*r+3*n-2*a-o)*e*e+a*e+r},t=(t,r,n)=>{let i=n*t,a=Math.floor(i),o=i-a,s=r[Math.max(0,a-1)],l=r[a],f=r[Math.min(n,a+1)],u=r[Math.min(n,a+2)];return[e(o,s[0],l[0],f[0],u[0]),e(o,s[1],l[1],f[1],u[1])]},r=(e,t,r,n)=>(e-r)**2+(t-n)**2,n=(e,t,r)=>{let n=t[0],i=t[1],a=r[0]-n,o=r[1]-i;if(0!==a||0!==o){let t=((e[0]-n)*a+(e[1]-i)*o)/(a*a+o*o);t>1?(n=r[0],i=r[1]):t>0&&(n+=a*t,i+=o*t)}return(a=e[0]-n)*a+(o=e[1]-i)*o},i=(e,t,r,a,o)=>{let s,l=a;for(let i=t+1;il&&(s=i,l=a)}l>a&&(s-t>1&&i(e,t,s,a,o),o.push(e[s]),r-s>1&&i(e,s,r,a,o))},a=(e,t)=>{let r=e.length-1,n=[e[0]];return i(e,0,r,t,n),n.push(e[r]),n};self.onmessage=function(e){var n;let i,o;(e.data.points?+e.data.points.length:0)||self.postMessage({error:Error("No points provided")}),e.data.points;let s=(n=e.data.points,i={},o=!Number.isNaN(+n[0][5]),n.forEach(e=>{let t=e[4];i[t]||(i[t]=[]),o?i[t][e[5]]=e:i[t].push(e)}),Object.entries(i).forEach(e=>{i[e[0]]=e[1].filter(e=>e),i[e[0]].reference=e[1][0]}),i);self.postMessage({points:Object.entries(s).reduce((n,i)=>(n[i[0]]=((e,{maxIntPointsPerSegment:n=100,tolerance:i=.002}={})=>{let o,s=e.length,l=s-1,f=l*n+1,u=i**2,c=[];for(let i=0;iu&&(s.push(c),o=c)}s.push(e[i+1]),s=a(s,u),c=c.concat(s.slice(0,s.length-1))}return c.push(e[e.length-1].slice(0,2)),c.flat()})(i[1],e.data.options),n[i[0]].reference=i[1].reference,n),{})})}},tR={showRecticle:{replacement:"showReticle",removalVersion:"2",translation:h},recticleColor:{replacement:"reticleColor",removalVersion:"2",translation:h},keyMap:{replacement:"actionKeyMap",removalVersion:"2",translation:e=>Object.entries(e).reduce((e,[t,r])=>(e[r]?e[r]=[...e[r],t]:e[r]=t,e),{})}},tL=e=>{for(let t of Object.keys(e).filter(e=>tR[e])){let{replacement:r,removalVersion:n,translation:i}=tR[t];console.warn(`regl-scatterplot: the "${t}" property is deprecated and will be removed in v${n}. 
Please use "${r}" instead.`),e[tR[t].replacement]=e[t]!==e2?i(e[t]):e[t],delete e[t]}return e},tF=(e,t,{allowSegment:r=!1,allowDensity:n=!1,allowInherit:i=!1}={})=>eX.has(e)?"valueZ":eJ.has(e)?"valueW":"segment"===e?r?"segment":t:"density"===e?n?"density":t:"inherit"===e&&i?"inherit":t,tV=e=>{switch(e){case"valueZ":return 2;case"valueW":return 3;default:return null}},t$=(e={})=>{var t;let r,n,f,u,c,d,p,m,g,b,x,A,L,F,U,Z,G,q,H,K,Y,Q,X,ee,et,er,ei,ea,eo,es,el,ed,ep,eX,eJ,e6,e4,e8,e7=(t={async:!e.syncEvents,caseInsensitive:!0},eJ=!!t?.async,e6=!!t?.caseInsensitive,{publish:o(e4=t?.stack||l(),{async:eJ,caseInsensitive:e6}),subscribe:i(e4,{caseInsensitive:e6}),unsubscribe:a(e4,{caseInsensitive:e6}),clear:s(e4),stack:e4}),e9=new Float32Array(16),te=new Float32Array(16),tr=[0,0];tL(e);let{renderer:tn,antiAliasing:ta=.5,pixelAligned:tc=!1,backgroundColor:td=eU,backgroundImage:tp=null,canvas:ty=document.createElement("canvas"),colorBy:tv=null,deselectOnDblClick:tR=!0,deselectOnEscape:t$=!0,lassoColor:tN=ek,lassoLineWidth:tW=2,lassoMinDelay:tU=10,lassoMinDist:tZ=3,lassoClearEvent:tG=eE,lassoInitiator:tq=!1,lassoInitiatorParentElement:tH=document.body,lassoLongPressIndicatorParentElement:tK=document.body,lassoOnLongPress:tY=!1,lassoLongPressTime:tQ=e_,lassoLongPressAfterEffectTime:tX=eO,lassoLongPressEffectDelay:tJ=eC,lassoLongPressRevertEffectTime:t0=eD,lassoType:t1="lasso",lassoBrushSize:t2=24,actionKeyMap:t3=eV,mouseMode:t5=em,showReticle:t6=!1,reticleColor:t4=eQ,pointColor:t8=e$,pointColorActive:t7=eN,pointColorHover:t9=eW,showPointConnections:re=!1,pointConnectionColor:rt=eZ,pointConnectionColorActive:rr=eG,pointConnectionColorHover:rn=eq,pointConnectionColorBy:ri=null,pointConnectionOpacity:ra=null,pointConnectionOpacityBy:ro=null,pointConnectionOpacityActive:rs=.66,pointConnectionSize:rl=2,pointConnectionSizeActive:rf=2,pointConnectionSizeBy:ru=null,pointConnectionMaxIntPointsPerSegment:rc=100,pointConnectionTolerance:rd=.002,pointSize:rp=6,pointSizeSelected:rm=2,pointS
izeMouseDetection:rh="auto",pointOutlineWidth:ry=2,opacity:rv=ef,opacityBy:rg=null,opacityByDensityFill:rb=.15,opacityInactiveMax:rx=1,opacityInactiveScale:rw=1,sizeBy:rA=null,pointScaleMode:rS="asinh",height:rE=ef,width:rT=ef,annotationLineColor:rk=eH,annotationLineWidth:r_=1,annotationHVLineLimit:rO=1e3,cameraIsFixed:rC=!1}=e,rD=rT===ef?1:rT,rP=rE===ef?1:rE,{performanceMode:rM=!1,opacityByDensityDebounceTime:rI=25,spatialIndexUseWorker:rj=e1}=e,rz=!!(e.renderPointsAsSquares||rM),rB=!!(e.disableAlphaBlending||rM);t5=tf(ev,em)(t5),tn||(tn=tC({regl:e.regl,gamma:e.gamma})),td=tw(td,!0),tN=tw(tN,!0),t4=tw(t4,!0);let rR=!1,rL=!1,rF=tA(td),rV=!1,r$=null,rN=[0,0],rW=-1,rU=[],rZ=new Set,rG=new Set,rq=!1,rH=new Set,rK=[],rY=0,rQ=0,rX=!1,rJ=[],r0=e.aspectRatio||1,r1=!1,r2=!0,r3=!1;t8=tg(t8)?[...t8]:[t8],t7=tg(t7)?[...t7]:[t7],t9=tg(t9)?[...t9]:[t9],t8=t8.map(e=>tw(e,!0)),t7=t7.map(e=>tw(e,!0)),t9=t9.map(e=>tw(e,!0)),rv=to(rv=!Array.isArray(rv)&&Number.isNaN(+rv)?t8[0][3]:rv,ts,{minLength:1})?[...rv]:[rv];let r5=1/(rp=to(rp,ts,{minLength:1})?[...rp]:[rp])[0];rt="inherit"===rt?[...t8]:(rt=tg(rt)?[...rt]:[rt]).map(e=>tw(e,!0)),rr="inherit"===rr?[...t7]:(rr=tg(rr)?[...rr]:[rr]).map(e=>tw(e,!0)),rn="inherit"===rn?[...t9]:(rn=tg(rn)?[...rn]:[rn]).map(e=>tw(e,!0)),ra="inherit"===ra?[...rv]:to(ra,ts,{minLength:1})?[...ra]:[ra],rl="inherit"===rl?[...rp]:to(rl,ts,{minLength:1})?[...rl]:[rl],tv=tF(tv,null),rg=tF(rg,null,{allowDensity:!0}),rA=tF(rA,null),ri=tF(ri,null,{allowSegment:!0,allowInherit:!0}),ro=tF(ro,null,{allowSegment:!0}),ru=tF(ru,null,{allowSegment:!0});let r6=0,r4=0,r8=!1,r7=null,r9=t6,ne=0,nt=0,nr=!1,nn=!1,ni=!1,na=!1,no=ew,ns=ew,nl=!1,nf=e.xScale||null,nu=e.yScale||null,nc=0,nd=0,np=0,nm=0;nf&&(nc=nf.domain()[0],nd=nf.domain()[1]-nf.domain()[0],nf.range([0,rD])),nu&&(np=nu.domain()[0],nm=nu.domain()[1]-nu.domain()[0],nu.range([rP,0]));let nh=e=>-1+e/rD*2,ny=e=>1+-(e/rP*2),nv=(e,t)=>{let n=[e,t,1,1];return C(n,n,T(e9,k(e9,d,k(e9,r.view,m)))),n.slice(0,2)},ng=(e=0)=>{let 
t=id(),r=(Z[1]-G[1])/ty.height;return(F*t+e)*r},nb=()=>rq?rK.filter((e,t)=>rH.has(t)):rK,nx=(e,t,r,n)=>{let i=u.range(e,t,r,n);return rq?i.filter(e=>rH.has(e)):i},nw=()=>{let[e,t]=[nh(tr[0]),ny(tr[1])],[r,n]=nv(e,t),i=ng(4),a=nx(r-i,n-i,r+i,n+i),o=i,s=-1;for(let e of a){let[t,i]=rK[e],a=ti(t,i,r,n);a{rJ=[],n&&n.clear()},nS=e=>e&&e.length>4,nE=(e,t)=>{if(x||!re||!nS(rK[e[0]]))return;let r=0===t,n=1===t?e=>rG.add(e):h,i=Object.keys(e.reduce((e,t)=>{let r=rK[t];return e[Array.isArray(r[4])?r[4][0]:r[4]]=!0,e},{})),a=g.getData().opacities;for(let e of i.filter(e=>!rG.has(+e))){let t=b[e][0],i=b[e][2],o=4*t+2*b[e][3],s=o+2*i+4;void 0===a.__original__&&(a.__original__=a.slice());for(let e=o;e[e%r6/r6+r4,Math.floor(e/r6)/r6+r4],nk=e=>rq&&!rH.has(e),n_=({preventEvent:e=!1}={})=>{tG===eS&&nA(),rU.length>0&&(e||e7.publish("deselect"),rG.clear(),nE(rU,0),rU=[],rZ.clear(),r2=!0)},nO=(e,{merge:t=!1,remove:r=!1,preventEvent:n=!1}={})=>{let i=Array.isArray(e)?e:[e],a=[...rU];if(t){let e;if(e=[],rU.forEach(t=>{e[t]=!0}),i.forEach(t=>{e[t]=!0}),rU=e.reduce((e,t,r)=>(t&&e.push(r),e),[]),a.length===rU.length){r2=!0;return}}else if(r){let e=new Set(i);if(rU=rU.filter(t=>!e.has(t)),a.length===rU.length){r2=!0;return}}else{if(rU?.length>0&&nE(rU,0),a.length>0&&0===i.length)return void n_({preventEvent:n});rU=i}if(y(a,rU)){r2=!0;return}let o=[];rZ.clear(),rG.clear();for(let e=rU.length-1;e>=0;e--){let t=rU[e];if(t<0||t>=rY||nk(t)){rU.splice(e,1);continue}rZ.add(t),o.push.apply(o,nT(t))}ee({usage:"dynamic",type:"float",data:o}),nE(rU,1),n||e7.publish("select",{points:rU}),r2=!0},nC=(e,{showReticleOnce:t=!1,preventEvent:r=!1}={})=>{let n=!1;if(!(rq&&!rH.has(e))&&e>=0&&e=0&&i&&!rZ.has(t)&&nE([t],0),eX=e,et.subdata(nT(e)),rZ.has(e)||nE([e],2),i&&!r&&e7.publish("pointover",eX)}else(n=+eX>=0)&&(rZ.has(eX)||nE([eX],0),r||e7.publish("pointout",eX)),eX=void 0;n&&(r2=!0,r3=t)},nD=e=>{let t=ty.getBoundingClientRect();return 
tr[0]=e.clientX-t.left,tr[1]=e.clientY-t.top,[...tr]},nP=tt(ty,{onStart:()=>{r.config({isFixed:!0}),rV=!0,rX=!0,nA(),rW>=0&&(clearTimeout(rW),rW=-1),e7.publish("lassoStart")},onDraw:(e,t)=>{rJ=e,n.setPoints(t),e7.publish("lassoExtend",{coordinates:e})},onEnd:(e,t,{merge:n=!1,remove:i=!1}={})=>{r.config({isFixed:rC}),rJ=[...e],nO((e=>{let t=(e=>{let t=1/0,r=-1/0,n=1/0,i=-1/0;for(let a=0;ar?e[a]:r,n=e[a+1]i?e[a+1]:i;return[t,n,r,i]})(e);if(!(([e,t,r,n])=>Number.isFinite(e)&&Number.isFinite(t)&&Number.isFinite(r)&&Number.isFinite(n)&&r-e>0&&n-t>0)(t))return[];let r=nx(...t),n=[];for(let t of r)tm(e,rK[t])&&n.push(t);return n})(t),{merge:n,remove:i}),e7.publish("lassoEnd",{coordinates:rJ}),tG===eE&&nA()},enableInitiator:tq,initiatorParentElement:tH,longPressIndicatorParentElement:tK,pointNorm:([e,t])=>nv(nh(e),ny(t)),minDelay:tU,minDist:"brush"===t1?Math.max(3,tZ):tZ,type:t1}),nM=(e,t)=>{switch(t3[t]){case"alt":return e.altKey;case"cmd":return e.metaKey;case eB:return e.ctrlKey;case eR:return e.metaKey;case eL:return e.shiftKey;default:return!1}},nI=e=>{nn&&1===e.buttons&&(rV=!0,r$=performance.now(),rN=nD(e),(rX=t5===eh||nM(e,eP))||!tY||(nP.showLongPressIndicator(e.clientX,e.clientY,{time:tQ,extraTime:tX,delay:tJ}),rW=setTimeout(()=>{rW=-1,rX=!0},tQ)))},nj=e=>{nn&&(rV=!1,rW>=0&&(clearTimeout(rW),rW=-1),rX&&(e.preventDefault(),rX=!1,nP.end({merge:nM(e,eI),remove:nM(e,ej)})),tY&&nP.hideLongPressIndicator({time:t0}))},nz=e=>{if(!nn||(e.preventDefault(),ti(...nD(e),...rN)>=tZ))return;let t=performance.now()-r$;if(!tq||t<500){let t=nw();t>=0?(rU.length>0&&tG===eS&&nA(),nO([t],{merge:nM(e,eI),remove:nM(e,ej)})):U||(U=setTimeout(()=>{U=null,nP.showInitiator(e)},200))}},nB=e=>{nP.hideInitiator(),U&&(clearTimeout(U),U=null),tR&&(e.preventDefault(),n_())},nR=e=>{!na&&(nl=document.elementsFromPoint(e.clientX,e.clientY).some(e=>e===ty),na=!0);if(!(nn&&(nl||rV)))return;let 
t=ti(...nD(e),...rN)>=tZ;nl&&!rX&&nC(nw()),rX?(e.preventDefault(),nP.extend(e,!0)):rV&&tY&&t&&nP.hideLongPressIndicator({time:t0}),rW>=0&&t&&(clearTimeout(rW),rW=-1),rV&&(r2=!0)},nL=()=>{eX=void 0,nl=!1,na=!1,nn&&(+eX>=0&&!rZ.has(eX)&&nE([eX],0),nj(),r2=!0)},nF=()=>{let e=Math.max(rp.length,rv.length),t=new Float32Array((nt=Math.max(2,Math.ceil(Math.sqrt(e))))**2*4);for(let r=0;r{let n=e.length,i=t.length,a=r.length,o=[];if(n===i&&i===a)for(let i=0;i{let e=nV(),t=new Float32Array((ne=Math.max(2,Math.ceil(Math.sqrt(e.length))))**2*4);return e.forEach((e,r)=>{t[4*r]=e[0],t[4*r+1]=e[1],t[4*r+2]=e[2],t[4*r+3]=e[3]}),tn.regl.texture({data:t,shape:[ne,ne,4],type:"float"})},nN=()=>{d=O([],[1/(c=rD/rP),1,1]),p=O([],[1/c,1,1]),m=O([],[r0,1,1])},nW=(e,t)=>r=>{var n;if(!r||0===r.length)return;let i=[...e()],a=tg(r)?r:[r];if(a=a.map(e=>tw(e,!0)),n=a,!(Array.isArray(i)&&Array.isArray(n)&&i.length===n.length&&(0===i.length||Array.isArray(i[0])&&Array.isArray(n[0])&&i.every(([e,t,r,i],a)=>{let[o,s,l,f]=n[a];return e===o&&t===s&&r===l&&i===f})))){ed&&ed.destroy();try{t(a),ed=n$()}catch(e){console.error("Invalid colors. 
Switching back to default colors."),t(i),ed=n$()}}},nU=nW(()=>t8,e=>{t8=e}),nZ=nW(()=>t7,e=>{t7=e}),nG=nW(()=>t9,e=>{t9=e}),nq=()=>{let e,t,r,n,i,a;if(!(nf||nu))return;let[o,s]=(e=nv(-1,-1),t=nv(1,1),r=(e[0]+1)/2,n=(t[0]+1)/2,i=(e[1]+1)/2,a=(t[1]+1)/2,[[nc+r*nd,nc+n*nd],[np+i*nm,np+a*nm]]);nf&&nf.domain(o),nu&&nu.domain(s)},nH=(e,t)=>{rP=Math.max(1,e),ty.height=Math.floor(rP*window.devicePixelRatio),nu&&(nu.range([rP,0]),t||nq())},nK=e=>{if(e===ef){rE=e,ty.style.height="100%",window.requestAnimationFrame(()=>{ty&&nH(ty.getBoundingClientRect().height)});return}+e&&!(0>=+e)&&(nH(rE=+e),ty.style.height=`${rE}px`)},nY=()=>{F=rh,rh===ef&&(F=Array.isArray(rp)?rp.reduce((e,t)=>t>e?t:e,-1/0):rp)},nQ=e=>{let t=Array.isArray(rp)?[...rp]:rp;to(e,ts,{minLength:1})?rp=[...e]:tl(+e)&&(rp=[+e]),t===rp||y(t,rp)||(ep&&ep.destroy(),r5=1/rp[0],ep=nF(),nY())},nX=(e,t)=>{rD=Math.max(1,e),ty.width=Math.floor(rD*window.devicePixelRatio),nf&&(nf.range([0,rD]),t||nq())},nJ=e=>{if(e===ef){rT=e,ty.style.width="100%",window.requestAnimationFrame(()=>{ty&&nX(ty.getBoundingClientRect().width)});return}+e&&!(0>=+e)&&(nX(rT=+e),ty.style.width=`${rD}px`)},n0=e=>{switch(e){case"valueZ":return no;case"valueW":return ns;default:return null}},n1=(e,t)=>e===ex?e=>Math.round(e*(t.length-1)):h,n2=()=>ta,n3=()=>[ty.width,ty.height],n5=()=>ed,n6=()=>ne,n4=()=>.5/ne,n8=()=>window.devicePixelRatio,n7=()=>ee,n9=()=>ep,ie=()=>nt,it=()=>.5/nt,ir=()=>0,ii=()=>Y||H,ia=()=>r6,io=()=>.5/r6,is=()=>k(te,p,k(te,r.view,m)),il=()=>window.devicePixelRatio,iu=()=>tb(r5,r.scaling[0])*window.devicePixelRatio,ic=()=>r.scaling[0]>1?Math.asinh(tb(1,r.scaling[0]))/Math.asinh(1)*window.devicePixelRatio:tb(r5,r.scaling[0])*window.devicePixelRatio,id=ic;"linear"===rS?id=iu:"constant"===rS&&(id=il);let 
ip=()=>rq?rH.size:rY,im=()=>rU.length,ih=()=>im()>0?rx:1,iy=()=>im()>0?rw:1,iv=()=>+("valueZ"===tv),ig=()=>+("valueW"===tv),ib=()=>+("valueZ"===rg),ix=()=>+("valueW"===rg),iw=()=>+("density"===rg),iA=()=>+("valueZ"===rA),iS=()=>+("valueW"===rA),iE=()=>+tc,iT=()=>"valueZ"===tv?no===ex?t8.length-1:1:ns===ex?t8.length-1:1,ik=()=>"valueZ"===rg?no===ex?rv.length-1:1:ns===ex?rv.length-1:1,i_=()=>"valueZ"===rA?no===ex?rp.length-1:1:ns===ex?rp.length-1:1,iO=e=>{if("density"!==rg)return 1;let t=id(),n=rp[0]*t,i=2/(2/r.view[0])*(2/(2/r.view[5])),a=e.viewportHeight,o=e.viewportWidth,s=rb*o*a/(rQ*n*n)*tx(1,i);s*=rz?1:1/(.25*Math.PI);let l=tb(1,n)+.5;return tx(1,tb(0,s*=(n/l)**2))},iC=tn.regl({framebuffer:()=>Q,vert:tj,frag:tI,attributes:{position:[-4,0,4,4,4,-4]},uniforms:{startStateTex:()=>K,endStateTex:()=>H,t:(e,t)=>t.t},count:3}),iD=(e,t,r,n=eu,i=ih,a=iy)=>tn.regl({frag:rz?tM:tz,vert:`
-precision highp float;
-
-uniform sampler2D colorTex;
-uniform float colorTexRes;
-uniform float colorTexEps;
-uniform sampler2D stateTex;
-uniform float stateTexRes;
-uniform float stateTexEps;
-uniform float devicePixelRatio;
-uniform sampler2D encodingTex;
-uniform float encodingTexRes;
-uniform float encodingTexEps;
-uniform float pointSizeExtra;
-uniform float pointOpacityMax;
-uniform float pointOpacityScale;
-uniform float numPoints;
-uniform float globalState;
-uniform float isColoredByZ;
-uniform float isColoredByW;
-uniform float isOpacityByZ;
-uniform float isOpacityByW;
-uniform float isOpacityByDensity;
-uniform float isSizedByZ;
-uniform float isSizedByW;
-uniform float isPixelAligned;
-uniform float colorMultiplicator;
-uniform float opacityMultiplicator;
-uniform float opacityDensity;
-uniform float sizeMultiplicator;
-uniform float numColorStates;
-uniform float pointScale;
-uniform float drawingBufferWidth;
-uniform float drawingBufferHeight;
-uniform mat4 modelViewProjection;
-
-attribute vec2 stateIndex;
-
-varying vec4 color;
-varying float finalPointSize;
-
-void main() {
- vec4 state = texture2D(stateTex, stateIndex);
-
- if (isPixelAligned < 0.5) {
- gl_Position = modelViewProjection * vec4(state.x, state.y, 0.0, 1.0);
- } else {
- vec4 clipSpacePosition = modelViewProjection * vec4(state.x, state.y, 0.0, 1.0);
- vec2 ndcPosition = clipSpacePosition.xy / clipSpacePosition.w;
- vec2 pixelPos = 0.5 * (ndcPosition + 1.0) * vec2(drawingBufferWidth, drawingBufferHeight);
- pixelPos = floor(pixelPos + 0.5); // Snap to nearest pixel
- vec2 snappedPosition = (pixelPos / vec2(drawingBufferWidth, drawingBufferHeight)) * 2.0 - 1.0;
- gl_Position = vec4(snappedPosition, 0.0, 1.0);
- }
-
-
- // Determine color index
- float colorIndexZ = isColoredByZ * floor(state.z * colorMultiplicator);
- float colorIndexW = isColoredByW * floor(state.w * colorMultiplicator);
-
- // Multiply by the number of color states per color
- // I.e., normal, active, hover, background, etc.
- float colorIndex = (colorIndexZ + colorIndexW) * numColorStates;
-
- // Half a "pixel" or "texel" in texture coordinates
- float colorLinearIndex = colorIndex + globalState;
-
- // Need to add cEps here to avoid floating point issue that can lead to
- // dramatic changes in which color is loaded as floor(3/2.9999) = 1 but
- // floor(3/3.0001) = 0!
- float colorRowIndex = floor((colorLinearIndex + colorTexEps) / colorTexRes);
-
- vec2 colorTexIndex = vec2(
- (colorLinearIndex / colorTexRes) - colorRowIndex + colorTexEps,
- colorRowIndex / colorTexRes + colorTexEps
- );
-
- color = texture2D(colorTex, colorTexIndex);
-
- // Retrieve point size
- float pointSizeIndexZ = isSizedByZ * floor(state.z * sizeMultiplicator);
- float pointSizeIndexW = isSizedByW * floor(state.w * sizeMultiplicator);
- float pointSizeIndex = pointSizeIndexZ + pointSizeIndexW;
-
- float pointSizeRowIndex = floor((pointSizeIndex + encodingTexEps) / encodingTexRes);
- vec2 pointSizeTexIndex = vec2(
- (pointSizeIndex / encodingTexRes) - pointSizeRowIndex + encodingTexEps,
- pointSizeRowIndex / encodingTexRes + encodingTexEps
- );
- float pointSize = texture2D(encodingTex, pointSizeTexIndex).x;
-
- // Retrieve opacity
- ${3===n?"":`
- if (isOpacityByDensity < 0.5) {
- float opacityIndexZ = isOpacityByZ * floor(state.z * opacityMultiplicator);
- float opacityIndexW = isOpacityByW * floor(state.w * opacityMultiplicator);
- float opacityIndex = opacityIndexZ + opacityIndexW;
-
- float opacityRowIndex = floor((opacityIndex + encodingTexEps) / encodingTexRes);
- vec2 opacityTexIndex = vec2(
- (opacityIndex / encodingTexRes) - opacityRowIndex + encodingTexEps,
- opacityRowIndex / encodingTexRes + encodingTexEps
- );
- color.a = texture2D(encodingTex, opacityTexIndex)[${1+n}];
- } else {
- color.a = min(1.0, opacityDensity + globalState);
- }
- `}
-
- color.a = min(pointOpacityMax, color.a) * pointOpacityScale;
- finalPointSize = (pointSize * pointScale) + pointSizeExtra;
- gl_PointSize = finalPointSize;
-}
-`,blend:{enable:!rB,func:{srcRGB:"src alpha",srcAlpha:"one",dstRGB:"one minus src alpha",dstAlpha:"one minus src alpha"}},depth:{enable:!1},attributes:{stateIndex:{buffer:r,size:2}},uniforms:{antiAliasing:n2,resolution:n3,modelViewProjection:is,devicePixelRatio:n8,pointScale:()=>id(),encodingTex:n9,encodingTexRes:ie,encodingTexEps:it,pointOpacityMax:i,pointOpacityScale:a,pointSizeExtra:e,globalState:n,colorTex:n5,colorTexRes:n6,colorTexEps:n4,stateTex:ii,stateTexRes:ia,stateTexEps:io,isColoredByZ:iv,isColoredByW:ig,isOpacityByZ:ib,isOpacityByW:ix,isOpacityByDensity:iw,isSizedByZ:iA,isSizedByW:iS,isPixelAligned:iE,colorMultiplicator:iT,opacityMultiplicator:ik,opacityDensity:iO,sizeMultiplicator:i_,numColorStates:4,drawingBufferWidth:e=>e.drawingBufferWidth,drawingBufferHeight:e=>e.drawingBufferHeight},count:t,primitive:"points"}),iP=iD(ir,ip,()=>X),iM=iD(ir,()=>1,()=>et,2,()=>1,()=>1),iI=iD(()=>(rm+2*ry)*window.devicePixelRatio,im,n7,1,()=>1,()=>1),ij=iD(()=>(rm+ry)*window.devicePixelRatio,im,n7,3,()=>1,()=>1),iz=iD(()=>rm*window.devicePixelRatio,im,n7,1,()=>1,()=>1),iB=tn.regl({frag:tD,vert:tP,attributes:{position:[0,1,0,0,1,0,0,1,1,1,1,0]},uniforms:{modelViewProjection:is,texture:()=>tp},count:6}),iR=tn.regl({vert:`
- precision mediump float;
- uniform mat4 modelViewProjection;
- attribute vec2 position;
- void main () {
- gl_Position = modelViewProjection * vec4(position, 0, 1);
- }`,frag:`
- precision mediump float;
- uniform vec4 color;
- void main () {
- gl_FragColor = vec4(color.rgb, 0.2);
- }`,depth:{enable:!1},blend:{enable:!0,func:{srcRGB:"src alpha",srcAlpha:"one",dstRGB:"one minus src alpha",dstAlpha:"one minus src alpha"}},attributes:{position:()=>rJ},uniforms:{modelViewProjection:is,color:()=>tN},elements:()=>(function(e,t,r=2){let n,i,a,o=e.length,s=function(e,t,r,n,i){let a;if(i===function(e,t,r,n){let i=0;for(let a=t,o=r-n;a0)for(let t=0;t=0;t-=n)a=$(t/n|0,e[t],e[t+1],a);return a&&B(a,a.next)&&(N(a),a=a.next),a}(e,0,o,r,!0),l=[];if(!s||s.next===s.prev)return l;if(e.length>80*r){n=1/0,i=1/0;let t=-1/0,s=-1/0;for(let a=r;at&&(t=r),o>s&&(s=o)}a=0!==(a=Math.max(t-n,s-i))?32767/a:0}return function e(t,r,n,i,a,o,s){if(!t)return;!s&&o&&function(e,t,r,n){let i=e;do 0===i.z&&(i.z=I(i.x,i.y,t,r,n)),i.prevZ=i.prev,i.nextZ=i.next,i=i.next;while(i!==e)i.prevZ.nextZ=null,i.prevZ=null,function(e){let t,r=1;do{let n,i=e;e=null;let a=null;for(t=0;i;){t++;let o=i,s=0;for(let e=0;e0||l>0&&o;)0!==s&&(0===l||!o||i.z<=o.z)?(n=i,i=i.nextZ,s--):(n=o,o=o.nextZ,l--),a?a.nextZ=n:e=n,n.prevZ=a,a=n;i=o}a.nextZ=null,r*=2}while(t>1)}(i)}(t,i,a,o);let l=t;for(;t.prev!==t.next;){let f=t.prev,u=t.next;if(o?function(e,t,r,n){let i=e.prev,a=e.next;if(z(i,e,a)>=0)return!1;let o=i.x,s=e.x,l=a.x,f=i.y,u=e.y,c=a.y,d=Math.min(o,s,l),p=Math.min(f,u,c),m=Math.max(o,s,l),h=Math.max(f,u,c),y=I(d,p,t,r,n),v=I(m,h,t,r,n),g=e.prevZ,b=e.nextZ;for(;g&&g.z>=y&&b&&b.z<=v;){if(g.x>=d&&g.x<=m&&g.y>=p&&g.y<=h&&g!==i&&g!==a&&j(o,f,s,u,l,c,g.x,g.y)&&z(g.prev,g,g.next)>=0||(g=g.prevZ,b.x>=d&&b.x<=m&&b.y>=p&&b.y<=h&&b!==i&&b!==a&&j(o,f,s,u,l,c,b.x,b.y)&&z(b.prev,b,b.next)>=0))return!1;b=b.nextZ}for(;g&&g.z>=y;){if(g.x>=d&&g.x<=m&&g.y>=p&&g.y<=h&&g!==i&&g!==a&&j(o,f,s,u,l,c,g.x,g.y)&&z(g.prev,g,g.next)>=0)return!1;g=g.prevZ}for(;b&&b.z<=v;){if(b.x>=d&&b.x<=m&&b.y>=p&&b.y<=h&&b!==i&&b!==a&&j(o,f,s,u,l,c,b.x,b.y)&&z(b.prev,b,b.next)>=0)return!1;b=b.nextZ}return!0}(t,i,a,o):function(e){let t=e.prev,r=e.next;if(z(t,e,r)>=0)return!1;let 
n=t.x,i=e.x,a=r.x,o=t.y,s=e.y,l=r.y,f=Math.min(n,i,a),u=Math.min(o,s,l),c=Math.max(n,i,a),d=Math.max(o,s,l),p=r.next;for(;p!==t;){if(p.x>=f&&p.x<=c&&p.y>=u&&p.y<=d&&j(n,o,i,s,a,l,p.x,p.y)&&z(p.prev,p,p.next)>=0)return!1;p=p.next}return!0}(t)){r.push(f.i,t.i,u.i),N(t),t=u.next,l=u.next;continue}if((t=u)===l){s?1===s?e(t=function(e,t){let r=e;do{let n=r.prev,i=r.next.next;!B(n,i)&&R(n,r,r.next,i)&&V(n,i)&&V(i,n)&&(t.push(n.i,r.i,i.i),N(r),N(r.next),r=e=i),r=r.next}while(r!==e)return M(r)}(M(t),r),r,n,i,a,o,2):2===s&&function(t,r,n,i,a,o){let s=t;do{let t=s.next.next;for(;t!==s.prev;){var l,f;if(s.i!==t.i&&(l=s,f=t,l.next.i!==f.i&&l.prev.i!==f.i&&!function(e,t){let r=e;do{if(r.i!==e.i&&r.next.i!==e.i&&r.i!==t.i&&r.next.i!==t.i&&R(r,r.next,e,t))return!0;r=r.next}while(r!==e)return!1}(l,f)&&(V(l,f)&&V(f,l)&&function(e,t){let r=e,n=!1,i=(e.x+t.x)/2,a=(e.y+t.y)/2;do r.y>a!=r.next.y>a&&r.next.y!==r.y&&i<(r.next.x-r.x)*(a-r.y)/(r.next.y-r.y)+r.x&&(n=!n),r=r.next;while(r!==e)return n}(l,f)&&(z(l.prev,l,f.prev)||z(l,f.prev,f))||B(l,f)&&z(l.prev,l,l.next)>0&&z(f.prev,f,f.next)>0))){let l=function(e,t){let r=W(e.i,e.x,e.y),n=W(t.i,t.x,t.y),i=e.next,a=t.prev;return e.next=t,t.prev=e,r.next=i,i.prev=r,n.next=r,r.prev=n,a.next=n,n.prev=a,n}(s,t);s=M(s,s.next),l=M(l,l.next),e(s,r,n,i,a,o,0),e(l,r,n,i,a,o,0);return}t=t.next}s=s.next}while(s!==t)}(t,r,n,i,a,o):e(M(t),r,n,i,a,o,1);break}}}(s,l,r,n,i,a,0),l})(n.getPoints())}),iL=e=>{let t=new Float32Array(2*e),r=0;for(let n=0;n{let r=e.length;r4=.5/(r6=Math.max(2,Math.ceil(Math.sqrt(r))));let n=new Float32Array(r6**2*4),i=!0,a=!0,o=0,s=0,l=0;for(let t=0;tnew Promise(r=>{nn=!1;let n=t?.preventFilterReset&&e.length===rY;rQ=rY=e.length,H&&H.destroy(),H=iF(e,{z:t.zDataType,w:t.wDataType}),n||X({usage:"static",type:"float",data:iL(rY)}),en(t.spatialIndex||e,{useWorker:rj}).then(t=>{u=t,rK=e,nn=!0}).then(r)}),i$=(e,t)=>{er=r.target,ei=e,ea=r.distance[0],eo=t},iN=e=>new 
Promise(t=>{g.setPoints([]),e?.length>0?(x=!0,((e,t={tolerance:.002,maxIntPointsPerSegment:100})=>new Promise((r,n)=>{let i=new Worker(window.URL.createObjectURL(new Blob([`(${tB.toString()})()`],{type:"text/javascript"})));i.onmessage=e=>{e.data.error?n(e.data.error):r(e.data.points),i.terminate()},i.postMessage({points:e,options:t})}))(e,{maxIntPointsPerSegment:rc,tolerance:rd}).then(e=>{let r;b=[],r=0,Object.keys(e).forEach((t,n)=>{b[t]=[n,e[t].reference,e[t].length/2,r],r+=e[t].length/2});let n=Object.values(e);g.setPoints(1===n.length?n[0]:n,{colorIndices:(e=>{let t="inherit"===ri?tv:ri;if("segment"===t){let t=rt.length-1;return t<1?[]:e.reduce((e,r,n)=>{let i=0,a=[];for(let e=2;e