diff --git a/benchmarking/InteractiveAgentTester.py b/benchmarking/InteractiveAgentTester.py index a4e81fa..3093930 100644 --- a/benchmarking/InteractiveAgentTester.py +++ b/benchmarking/InteractiveAgentTester.py @@ -1,20 +1,20 @@ #!/usr/bin/env python3 """ -Interactive Agent Tester (Docker **or** Singularity backend) -========================================================== -A unified interactive tester that can drive either the **Docker sandbox** (`benchmarking_sandbox_management.py`) -or the **Apptainer/Singularity sandbox** (`benchmarking_sandbox_management_singularity.py`). - -At launch you choose a backend: - • *docker* – requires Docker daemon on this machine. - • *singularity* – requires `apptainer`/`singularity`; no Docker needed. - -The rest of the behaviour (multi‑turn GPT orchestration, FastAPI kernel execution, -resource upload, unlimited chat loop) is unchanged. +Interactive Agent Tester – Docker, Singularity‑API, or **Singularity‑Exec (offline‑REPL)** +======================================================================================= +Run a natural‑language chat loop that generates runnable Python, executes it inside a +container, and streams the results back. Works even on clusters where **no networking** +is allowed for Singularity by using a long‑lived REPL inside the container. + +Back‑ends +--------- +1. **docker** – Docker daemon + container with FastAPI kernel. +2. **singularity** – Singularity *instance* with FastAPI kernel. +3. **singularity-exec** – Long‑lived `singularity exec` REPL that talks to + `/opt/offline_kernel.py --repl` (no TCP). 
""" from __future__ import annotations -import argparse import base64 import json import os @@ -22,17 +22,19 @@ import shlex import subprocess import sys +import tempfile import textwrap import time +import uuid from datetime import datetime from pathlib import Path -from typing import List, Tuple +from typing import Dict, List, Optional, Tuple -# ── Third‑party deps ───────────────────────────────────────────────────────── +# ── 3rd‑party deps ────────────────────────────────────────────────────────── try: from dotenv import load_dotenv from openai import OpenAI, APIError - import requests + import requests # only needed for networked back‑ends from rich.console import Console from rich.panel import Panel from rich.prompt import Prompt @@ -43,12 +45,29 @@ sys.exit(1) console = Console() +SCRIPT_DIR = Path(__file__).resolve().parent +DATASETS_DIR = SCRIPT_DIR / "datasets" +OUTPUTS_DIR = SCRIPT_DIR / "outputs" +ENV_FILE = SCRIPT_DIR / ".env" -# ── Runtime‑backend selection (ask the user **before** importing managers) ── -backend = Prompt.ask("Choose sandbox backend", choices=["docker", "singularity"], default="docker") +# In‑container canonical paths +SANDBOX_DATA_PATH = "/workspace/dataset.h5ad" +SANDBOX_RESOURCES_DIR = "/workspace/resources" -SCRIPT_DIR = Path(__file__).resolve().parent +# ============================================================================== +# 1 · Choose back‑end BEFORE importing heavy managers +# ============================================================================== +backend = Prompt.ask( + "Choose sandbox backend", + choices=["docker", "singularity", "singularity-exec"], + default="docker", +) + +is_exec_mode = backend == "singularity-exec" +# ----------------------------------------------------------------------------- +# 1a · Docker (FastAPI) back‑end +# ----------------------------------------------------------------------------- if backend == "docker": sandbox_dir = SCRIPT_DIR / "sandbox" sys.path.insert(0, 
str(sandbox_dir)) @@ -60,73 +79,204 @@ ) finally: sys.path.pop(0) - COPY_CMD = lambda src, dst: subprocess.run(["docker", "cp", src, dst], check=True) + def COPY_CMD(src: str, dst: str): + subprocess.run(["docker", "cp", src, dst], check=True) + + EXECUTE_ENDPOINT = f"http://localhost:{_API_PORT}/execute" + STATUS_ENDPOINT = f"http://localhost:{_API_PORT}/status" + +# ----------------------------------------------------------------------------- +# 1b · Singularity instance (FastAPI) back‑end +# ----------------------------------------------------------------------------- elif backend == "singularity": sandbox_dir = SCRIPT_DIR / "sandbox" sys.path.insert(0, str(sandbox_dir)) try: import benchmarking_sandbox_management_singularity as sing - except ImportError as e: - console.print(f"[red]Failed to import Singularity manager: {e}[/red]") - sys.exit(1) + finally: + sys.path.pop(0) - class _SingWrapper: # thin adapter to mimic Docker SandboxManager API - def __init__(self): - pass + class _SingInstanceWrapper: def start_container(self): return sing.start_instance() - def stop_container(self, remove: bool = True, container_obj=None): + + def stop_container(self): return sing.stop_instance() - _BackendManager = _SingWrapper + + _BackendManager = _SingInstanceWrapper _SANDBOX_HANDLE = sing.INSTANCE_NAME _API_PORT = sing.API_PORT_HOST - # Apptainer/ Singularity lacks a simple cp, so we issue a warning and rely on bind‑mounts - def COPY_CMD(src, dst): # noqa: N802 - console.print(f"[yellow]File copy inside Singularity instance not automated.\n" - f"Ensure the file {src} is reachable at {dst} via bind mount or in the definition file.[/yellow]") + def COPY_CMD(src: str, dst: str): + console.print( + f"[yellow]Singularity instance: ensure {src} is reachable at {dst} via bind mount.[/yellow]" + ) -else: - console.print("[red]Unknown backend choice.[/red]") - sys.exit(1) + EXECUTE_ENDPOINT = f"http://localhost:{_API_PORT}/execute" + STATUS_ENDPOINT = 
f"http://localhost:{_API_PORT}/status" -# ── Constants (after backend choice) ───────────────────────────────────────── -DATASETS_DIR = SCRIPT_DIR / "datasets" -OUTPUTS_DIR = SCRIPT_DIR / "outputs" -ENV_FILE = SCRIPT_DIR / ".env" -SANDBOX_DATA_PATH = "/home/sandboxuser/data.h5ad" -SANDBOX_RESOURCES_DIR = "/home/sandboxuser/resources" -API_BASE_URL = f"http://localhost:{_API_PORT}" -EXECUTE_ENDPOINT = f"{API_BASE_URL}/execute" -STATUS_ENDPOINT = f"{API_BASE_URL}/status" +# ----------------------------------------------------------------------------- +# 1c · Singularity exec (offline‑REPL) back‑end +# ----------------------------------------------------------------------------- +elif backend == "singularity-exec": + sandbox_dir = SCRIPT_DIR / "sandbox" + sys.path.insert(0, str(sandbox_dir)) + try: + import benchmarking_sandbox_management_singularity as sing + finally: + sys.path.pop(0) + + SIF_PATH = sing.SIF_PATH + SING_BIN = sing.SING_BIN + SENTINEL = "<<>>" + class _SingExecBackend: + """Launch one long‑lived REPL inside the SIF and stream code to it.""" + + def __init__(self): + self._binds: List[str] = [] + self._proc: Optional[subprocess.Popen[str]] = None + + def set_data(self, dataset: Path, resources: List[Tuple[Path, str]]): + self._binds = [ + "--bind", + f"{dataset.resolve()}:{SANDBOX_DATA_PATH}", + ] + for host, cont in resources: + self._binds.extend(["--bind", f"{host.resolve()}:{cont}"]) + + # ------------------------------------------------------------------ + # Container lifecycle + # ------------------------------------------------------------------ + def start_container(self): + if self._proc: + return True # already running + if not sing.pull_sif_if_needed(): + return False + + cmd = [ + SING_BIN, + "exec", + "--containall", + "--cleanenv", + *self._binds, + str(SIF_PATH), + "python", + "/opt/offline_kernel.py", + "--repl", + ] + self._proc = subprocess.Popen( + cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + 
text=True, + bufsize=1, # line buffered + ) + # Wait for the REPL banner + ready_line = self._proc.stdout.readline().strip() + if ready_line != "__REPL_READY__": + console.print( + f"[red]REPL failed to start. Got: {ready_line}[/red]" + ) + self.stop_container() + return False + return True + + def stop_container(self): + if not self._proc: + return True + try: + if self._proc.stdin: + self._proc.stdin.close() + self._proc.terminate() + self._proc.wait(timeout=5) + except Exception: + self._proc.kill() + self._proc = None + return True + + # ------------------------------------------------------------------ + # Code execution + # ------------------------------------------------------------------ + def exec_code(self, code: str, timeout: int = 300) -> Dict: + if not self._proc: + raise RuntimeError("REPL not running") + assert self._proc.stdin and self._proc.stdout + + # Send code block + sentinel + self._proc.stdin.write(code) + if not code.endswith("\n"): + self._proc.stdin.write("\n") + self._proc.stdin.write(SENTINEL + "\n") + self._proc.stdin.flush() + + # Read exactly one JSON line + start_time = time.time() + while True: + if time.time() - start_time > timeout: + return { + "status": "timeout", + "stdout": "", + "stderr": "Execution timed out in REPL.", + "images": [], + } + line = self._proc.stdout.readline() + if not line: + continue + line = line.strip() + try: + return json.loads(line) + except json.JSONDecodeError: + # Non‑JSON noise; continue reading + continue + + _BackendManager = _SingExecBackend + def COPY_CMD(src: str, dst: str): + console.print("[yellow]singularity-exec mode uses bind mounts instead of docker cp.[/yellow]") +else: + console.print("[red]Unknown backend.") + sys.exit(1) -# ── Helper utilities ──────────────────────────────────────────────────────── +# ==================================================================================== +# 2 · Generic helpers (unchanged) +# 
==================================================================================== -def extract_python_code(txt: str) -> str | None: +def extract_python_code(txt: str) -> Optional[str]: m = re.search(r"```python\s*([\s\S]+?)\s*```", txt) return m.group(1).strip() if m else None -def display(role: str, content: str) -> None: +# Rich display wrappers + +def _panel(role: str, content: str): titles = {"system": "SYSTEM", "user": "USER", "assistant": "ASSISTANT"} styles = {"system": "dim blue", "user": "cyan", "assistant": "green"} - title = titles.get(role, role.upper()) - style = styles.get(role, "white") + console.print(Panel(content, title=titles.get(role, role.upper()), border_style=styles.get(role, "white"))) + +def display(role: str, content: str): if role == "assistant": - code = extract_python_code(content) - txt = re.sub(r"```python[\s\S]+?```", "", content, count=1).strip() - if txt: - console.print(Panel(txt, title=f"{title} (text)", border_style=style)) + code = extract_python_code(content) or "" + text_part = re.sub(r"```python[\s\S]+?```", "", content, count=1).strip() + if text_part: + _panel("assistant", text_part) if code: - console.print(Panel(Syntax(code, "python", line_numbers=True), title=f"{title} (code)", border_style=style)) + console.print( + Panel( + Syntax(code, "python", line_numbers=True), + title="ASSISTANT (code)", + border_style="green", + ) + ) else: - console.print(Panel(content, title=title, border_style=style)) + _panel(role, content) -# ── Dataset & prompt helpers ──────────────────────────────────────────────── +# ==================================================================================== +# 3 · Dataset / prompt helpers (unchanged) +# ==================================================================================== def get_initial_prompt() -> str: console.print("[bold cyan]Enter the initial user prompt (Ctrl+D to finish):[/bold cyan]") @@ -135,7 +285,7 @@ def get_initial_prompt() -> str: except EOFError: txt = "" if 
not txt: - console.print("[red]Empty prompt. Aborting.[/red]") + console.print("[red]Empty prompt – aborting.[/red]") sys.exit(1) return txt @@ -144,7 +294,11 @@ def select_dataset() -> Tuple[Path, dict]: if not DATASETS_DIR.exists(): console.print(f"[red]Datasets dir not found: {DATASETS_DIR}[/red]") sys.exit(1) - items = [(p, json.loads(p.with_suffix(".json").read_text())) for p in DATASETS_DIR.glob("*.h5ad") if p.with_suffix(".json").exists()] + items = [ + (p, json.loads(p.with_suffix(".json").read_text())) + for p in DATASETS_DIR.glob("*.h5ad") + if p.with_suffix(".json").exists() + ] if not items: console.print("[red]No datasets found.[/red]") sys.exit(1) @@ -155,13 +309,13 @@ def select_dataset() -> Tuple[Path, dict]: for i, (p, meta) in enumerate(items, 1): tbl.add_row(str(i), meta.get("dataset_title", p.stem), str(meta.get("cell_count", "?"))) console.print(tbl) - idx = int(Prompt.ask("Choose index", choices=[str(i) for i in range(1, len(items)+1)])) - 1 + idx = int(Prompt.ask("Choose index", choices=[str(i) for i in range(1, len(items) + 1)])) - 1 return items[idx] def collect_resources() -> List[Tuple[Path, str]]: - console.print("\n[bold cyan]Optional: list files/folders to copy into sandbox[/bold cyan] (blank line to finish)") - lst: List[Tuple[Path, str]] = [] + console.print("\n[bold cyan]Optional: paths to bind inside sandbox[/bold cyan] (blank line to finish)") + res: List[Tuple[Path, str]] = [] while True: p = Prompt.ask("Path", default="").strip() if not p: @@ -170,124 +324,145 @@ def collect_resources() -> List[Tuple[Path, str]]: if not path.exists(): console.print(f"[yellow]Path does not exist: {path}[/yellow]") continue - lst.append((path, f"{SANDBOX_RESOURCES_DIR}/{path.name}")) - return lst + res.append((path, f"{SANDBOX_RESOURCES_DIR}/{path.name}")) + return res -# ── FastAPI kernel helpers ────────────────────────────────────────────────── +# ==================================================================================== +# 4 · 
Networked FastAPI helpers (skipped for exec mode) +# ==================================================================================== def api_alive(max_retries: int = 10, delay: float = 1.5) -> bool: + if is_exec_mode: + return True # nothing to ping for _ in range(max_retries): try: if requests.get(STATUS_ENDPOINT, timeout=2).json().get("status") == "ok": return True - except requests.RequestException: + except Exception: time.sleep(delay) return False def format_execute_response(resp: dict) -> str: lines = ["Code execution result:"] - stdout, stderr, imgs = [], [], [] - for itm in resp.get("outputs", []): - if itm["type"] == "stream": - (stdout if itm.get("name") == "stdout" else stderr).append(itm.get("text", "")) - elif itm["type"] == "error": - stderr.append("Error: " + itm.get("evalue", "")) - stderr.extend(itm.get("traceback", [])) - elif itm["type"] == "display_data": - for mime, b64 in itm.get("data", {}).items(): - if mime.startswith("image/"): - fname = OUTPUTS_DIR / f"{datetime.now():%Y%m%d_%H%M%S_%f}.{mime.split('/')[1].split('+')[0]}" - fname.parent.mkdir(exist_ok=True) - with open(fname, "wb") as f: - f.write(base64.b64decode(b64)) - imgs.append(str(fname)) + if resp.get("status") != "ok": + lines.append(f"[status: {resp.get('status')}]") + stdout, stderr = resp.get("stdout", ""), resp.get("stderr", "") if stdout: - lines += ["--- STDOUT ---", "".join(stdout)[:1500]] + lines += ["--- STDOUT ---", stdout[:1500]] if stderr: - lines += ["--- STDERR ---", "".join(stderr)[:1500]] - if imgs: - lines.append("Saved images: " + ", ".join(imgs)) - lines.append(f"Final Status: {resp.get('final_status')}") + lines += ["--- STDERR ---", stderr[:1500]] + img_paths = [] + for b64 in resp.get("images", []): + fname = OUTPUTS_DIR / f"{datetime.now():%Y%m%d_%H%M%S_%f}.png" + fname.parent.mkdir(exist_ok=True, parents=True) + with open(fname, "wb") as f: + f.write(base64.b64decode(b64)) + img_paths.append(str(fname)) + if img_paths: + lines.append("Saved images: " + 
", ".join(img_paths)) return "\n".join(lines) -# ── Chat‑runner ───────────────────────────────────────────────────────────── +# ==================================================================================== +# 5 · Main interactive loop +# ==================================================================================== -def run_interactive(prompt: str, dataset: Path, metadata: dict, resources: List[Tuple[Path, str]]) -> None: +def run_interactive(prompt: str, dataset: Path, metadata: dict, resources: List[Tuple[Path, str]]): mgr = _BackendManager() console.print(f"Starting sandbox ({backend}) …") + + # Tell exec back‑end where data/resources are (creates bind list) + if is_exec_mode and hasattr(mgr, "set_data"): + mgr.set_data(dataset, resources) + if not mgr.start_container(): console.print("[red]Failed to start sandbox.[/red]") return - try: - if not api_alive(): - console.print("[red]Kernel API not responsive.[/red]") - return - # dataset copy (Docker only, Singularity warns via COPY_CMD) + if not api_alive(): + console.print("[red]Kernel API not responsive (networked back‑end).[/red]") + return + + # For docker / singularity‑instance we still *attempt* docker cp (no‑op or warning otherwise) + if not is_exec_mode: COPY_CMD(str(dataset), f"{_SANDBOX_HANDLE}:{SANDBOX_DATA_PATH}") for h, c in resources: COPY_CMD(str(h), f"{_SANDBOX_HANDLE}:{c}") - resource_lines = [f"- {c} (from {h})" for h, c in resources] or ["- (none)"] - sys_prompt = textwrap.dedent( - f""" - You are an AI assistant analysing a single‑cell dataset. The file lives inside the sandbox at **{SANDBOX_DATA_PATH}**. - Additional resources:\n""" + "\n".join(resource_lines) + "\n\n" + textwrap.dedent( - f"Dataset metadata:\n{json.dumps(metadata, indent=2)}\n\nWrap runnable Python in triple‑backtick ```python blocks. 
Imports & vars persist.""" - ) + resource_lines = [f"- {c} (from {h})" for h, c in resources] or ["- (none)"] + sys_prompt = textwrap.dedent( + f""" + You are an AI assistant analysing a single‑cell dataset. + Dataset path inside container: **{SANDBOX_DATA_PATH}** + Additional resources:\n""" + + "\n".join(resource_lines) + + "\n\n" + + textwrap.dedent( + f"Dataset metadata:\n{json.dumps(metadata, indent=2)}\n\n" + "Wrap runnable Python in triple‑backtick ```python blocks. Imports & variables persist within the container session." ) + ) - history = [ - {"role": "system", "content": sys_prompt}, - {"role": "user", "content": prompt}, - ] - display("system", sys_prompt) - display("user", prompt) - - openai = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) - turn = 0 - while True: - turn += 1 - console.print(f"\n[bold]OpenAI call (turn {turn})…[/bold]") - try: - rsp = openai.chat.completions.create(model="gpt-4o", messages=history, temperature=0.7) - except APIError as e: - console.print(f"[red]OpenAI error: {e}[/red]") - break - assistant_msg = rsp.choices[0].message.content - history.append({"role": "assistant", "content": assistant_msg}) - display("assistant", assistant_msg) - - code = extract_python_code(assistant_msg) - if code: - console.print("[cyan]Executing code…[/cyan]") - try: - api_r = requests.post(EXECUTE_ENDPOINT, json={"code": code, "timeout": 120}, timeout=130).json() - feedback = format_execute_response(api_r) - except Exception as exc: - feedback = f"Code execution result:\n[Execution error: {exc}]" - history.append({"role": "user", "content": feedback}) - display("user", feedback) - - console.print("\n[bold]Next message (blank = continue, 'exit' to quit):[/bold]") + history = [ + {"role": "system", "content": sys_prompt}, + {"role": "user", "content": prompt}, + ] + display("system", sys_prompt) + display("user", prompt) + + openai = OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + turn = 0 + while True: + turn += 1 + console.print(f"\n[bold]OpenAI call 
(turn {turn})…[/bold]") + try: + rsp = openai.chat.completions.create( + model="gpt-4o", messages=history, temperature=0.7 + ) + except APIError as e: + console.print(f"[red]OpenAI error: {e}[/red]") + break + assistant_msg = rsp.choices[0].message.content + history.append({"role": "assistant", "content": assistant_msg}) + display("assistant", assistant_msg) + + code = extract_python_code(assistant_msg) + if code: + console.print("[cyan]Executing code…[/cyan]") try: - user_in = input().strip() - except (EOFError, KeyboardInterrupt): - user_in = "exit" - if user_in.lower() in {"exit", "quit"}: - break - if user_in: - history.append({"role": "user", "content": user_in}) - display("user", user_in) - finally: - console.print("Stopping sandbox…") - mgr.stop_container(remove=True) + if is_exec_mode: + exec_result = mgr.exec_code(code, timeout=300) + else: + exec_result = requests.post( + EXECUTE_ENDPOINT, json={"code": code, "timeout": 300}, timeout=310 + ).json() + feedback = format_execute_response(exec_result) + except Exception as exc: + feedback = f"Code execution result:\n[Execution error on host: {exc}]" + + history.append({"role": "user", "content": feedback}) + display("user", feedback) + + console.print("\n[bold]Next message (blank = continue, 'exit' to quit):[/bold]") + try: + user_in = input().strip() + except (EOFError, KeyboardInterrupt): + user_in = "exit" + if user_in.lower() in {"exit", "quit"}: + break + if user_in: + history.append({"role": "user", "content": user_in}) + display("user", user_in) + + console.print("Stopping sandbox…") + mgr.stop_container() -# ── CLI entry ─────────────────────────────────────────────────────────────── +# ==================================================================================== +# 6 · Entry‑point +# ==================================================================================== def main(): load_dotenv(Path(ENV_FILE)) @@ -297,8 +472,8 @@ def main(): prompt = get_initial_prompt() data_p, meta = 
select_dataset() - res = collect_resources() - run_interactive(prompt, data_p, meta, res) + resources = collect_resources() + run_interactive(prompt, data_p, meta, resources) if __name__ == "__main__": @@ -306,3 +481,4 @@ def main(): main() except KeyboardInterrupt: console.print("\nInterrupted.") + diff --git a/benchmarking/sandbox/Singularity b/benchmarking/sandbox/Singularity index d4b4995..78b79b4 100644 --- a/benchmarking/sandbox/Singularity +++ b/benchmarking/sandbox/Singularity @@ -10,6 +10,7 @@ cp "requirements.txt" "${SINGULARITY_ROOTFS}/opt/app/requirements.txt" cp "kernel_api.py" "${SINGULARITY_ROOTFS}/opt/app/kernel_api.py" cp "start_kernel.py" "${SINGULARITY_ROOTFS}/opt/app/start_kernel.py" cp "start.sh" "${SINGULARITY_ROOTFS}/opt/app/start.sh" +cp "offline_kernel.py" "${SINGULARITY_ROOTFS}/opt/offline_kernel.py" # --------------------------------------------------------------- # Build steps executed INSIDE the container @@ -41,6 +42,9 @@ pip install --no-cache-dir -r /opt/app/requirements.txt chown -R "$NB_USER:$NB_GID" /opt/app chmod +x /opt/app/start_kernel.py /opt/app/start.sh +chown "$NB_USER:$NB_GID" /opt/offline_kernel.py +chmod +x /opt/offline_kernel.py + # --------------------------------------------------------------- # Runtime environment # --------------------------------------------------------------- diff --git a/benchmarking/sandbox/benchmarking_sandbox_management_singularity.py b/benchmarking/sandbox/benchmarking_sandbox_management_singularity.py index b260ad7..2f974ce 100644 --- a/benchmarking/sandbox/benchmarking_sandbox_management_singularity.py +++ b/benchmarking/sandbox/benchmarking_sandbox_management_singularity.py @@ -1,13 +1,22 @@ #!/usr/bin/env python3 -"""Singularity Sandbox Manager (Docker‑free) -========================================== -Pure‑Singularity version that **does not require Docker at all**. It pulls a -pre-built `sandbox.sif` from a specified URL when necessary. 
- -Commands -------------------------- - build – download/update `sandbox.sif` from a predefined URL - start – start an *instance* exposing the FastAPI kernel on host port 8000 +""" +Singularity Sandbox Manager (Docker-free) +======================================== +Pure-Singularity version that does not require Docker. Pulls a pre-built +`sandbox.sif` from a specified URL when necessary. + +Supports: +1. Networked mode: starts a Singularity instance with a FastAPI kernel. +2. Offline mode: provides SIF management (`pull_sif_if_needed`) and the + `SIF_PATH` / `SING_BIN` constants for the `singularity-exec` backend + in InteractiveAgentTester.py. + +For offline mode to function, the `sandbox.sif` image must contain +`/opt/offline_kernel.py`. Ensure it’s included during the SIF build. + +Commands: + build – download/update `sandbox.sif` + start – start an instance exposing the FastAPI kernel on port 8000 stop – stop & remove the instance status – show instance + port status logs – tail the instance log (default 50 lines) @@ -17,6 +26,7 @@ from __future__ import annotations import argparse +import getpass import logging import os import shlex @@ -26,13 +36,10 @@ import time from pathlib import Path -# --------------------------------------------------------------------------- -# Paths & constants -# --------------------------------------------------------------------------- +# ── Paths & constants (for InteractiveAgentTester.py) ───────────────────────── SCRIPT_DIR = Path(__file__).resolve().parent -SIF_PATH = SCRIPT_DIR / "sandbox.sif" # output image (will be downloaded here) -# DEF_FILE is no longer needed as we are pulling a pre-built image. 
-CONTAINER_URL = "https://github.com/OpenTechBio/Olaf/releases/download/v0.0.1/benchmarking_sandbox.sif" +SIF_PATH = SCRIPT_DIR / "sandbox.sif" +CONTAINER_URL = "https://github.com/OpenTechBio/Olaf/releases/download/v0.0.1/sandbox.sif" INSTANCE_NAME = "benchmarking_sandbox_instance" API_PORT_INSIDE = 8000 @@ -40,20 +47,23 @@ SING_BIN = shutil.which("apptainer") or shutil.which("singularity") if not SING_BIN: - print("Singularity/Apptainer executable not found in PATH. Do you need to load a module?", file=sys.stderr) + print( + "Singularity/Apptainer executable not found in PATH. " + "Do you need to load a module?", + file=sys.stderr, + ) sys.exit(1) -# --------------------------------------------------------------------------- -# Helpers -# --------------------------------------------------------------------------- -def run(cmd: list[str], *, capture: bool = False, check: bool = True): +def run(cmd: list[str], *, capture: bool = False, check: bool = True, timeout: int | None = None): logging.debug("$ %s", " ".join(shlex.quote(c) for c in cmd)) - return subprocess.run(cmd, text=True, capture_output=capture, check=check) + return subprocess.run(cmd, text=True, capture_output=capture, check=check, timeout=timeout) def pull_sif_if_needed(force_pull: bool = False) -> bool: - """Pull sandbox.sif from the predefined URL if it doesn't exist or if force_pull is true.""" + """ + Pull sandbox.sif from the predefined URL if it doesn't exist or if force_pull is true. + """ if SIF_PATH.exists() and not force_pull: logging.info("Using existing SIF: %s", SIF_PATH) return True @@ -66,198 +76,208 @@ def pull_sif_if_needed(force_pull: bool = False) -> bool: logging.error("Failed to remove existing SIF %s: %s", SIF_PATH, e) return False - logging.info("Pulling %s from %s …", SIF_PATH, CONTAINER_URL) - # Command: singularity pull [--force] - # Using --force to handle cases where the file might exist despite prior checks or incomplete downloads. 
+ logging.info( + "Pulling %s from %s … (SIF must contain /opt/offline_kernel.py)", + SIF_PATH, + CONTAINER_URL, + ) cmd = [SING_BIN, "pull", "--force", str(SIF_PATH), CONTAINER_URL] try: run(cmd) if not SIF_PATH.exists() or SIF_PATH.stat().st_size == 0: - logging.error("Singularity pull command executed, but SIF file is missing or empty.") + logging.error("Pull succeeded but SIF is missing or empty.") return False logging.info("Pull finished ✓. SIF is at %s", SIF_PATH) return True except subprocess.CalledProcessError as e: - logging.error("Singularity pull failed (return code %s)", e.returncode) - # subprocess.run with text=True should populate stdout/stderr - if hasattr(e, 'stderr') and e.stderr: + logging.error("Singularity pull failed (code %s)", e.returncode) + if e.stderr: logging.error("Stderr:\n%s", e.stderr.strip()) - if hasattr(e, 'stdout') and e.stdout: # Sometimes singularity puts error info in stdout - logging.error("Stdout:\n%s", e.stdout.strip()) + if e.stdout: + logging.error("Stdout:\n%s", e.stdout.strip()) return False except Exception as e: - logging.error("An unexpected error occurred during Singularity pull: %s", e) + logging.error("Unexpected error during pull: %s", e) return False def instance_running() -> bool: try: - # Use check=False as a non-zero exit code (no instances running) is not an error here. result = run([SING_BIN, "instance", "list"], capture=True, check=False) return INSTANCE_NAME in result.stdout - except subprocess.CalledProcessError: # Should not happen with check=False + except subprocess.CalledProcessError: return False - except FileNotFoundError: # If SING_BIN itself is somehow removed mid-script + except FileNotFoundError: logging.error("Singularity/Apptainer executable not found.") return False -def start_instance(rebuild: bool = False) -> bool: # rebuild here means force_pull for the SIF +def start_instance(rebuild: bool = False) -> bool: + """ + Start a Singularity instance for FastAPI access (networked mode). 
+ `rebuild=True` forces re-pulling the SIF. + """ if instance_running(): logging.warning("Instance already running – restarting…") - if not stop_instance(): # Attempt to stop, if it fails, don't proceed. - logging.error("Failed to stop existing instance. Cannot start new one.") - return False + if not stop_instance(): + logging.error("Could not stop existing instance.") + return False - # The 'rebuild' flag for start is interpreted as 'force_pull' for the SIF image if not pull_sif_if_needed(force_pull=rebuild): - logging.error("Failed to ensure SIF image is available. Cannot start instance.") + logging.error("Cannot ensure SIF image. Aborting start.") return False logging.info("Starting instance %s from %s …", INSTANCE_NAME, SIF_PATH) cmd = [ - SING_BIN, "instance", "start", + SING_BIN, + "instance", + "start", "--cleanenv", "--net", - "--network-args", f"portmap={API_PORT_HOST}:tcp:{API_PORT_INSIDE}", + "--network-args", + f"portmap={API_PORT_HOST}:tcp:{API_PORT_INSIDE}", str(SIF_PATH), INSTANCE_NAME, ] try: run(cmd) - logging.info("Instance start command executed. Waiting a moment to check status...") - time.sleep(3) # Give the instance a moment to register + logging.info("Start command sent. Waiting for instance…") + time.sleep(3) if instance_running(): - logging.info("Instance '%s' is running. Access API at http://localhost:%d", INSTANCE_NAME, API_PORT_HOST) + logging.info("Instance '%s' is running at http://localhost:%d", INSTANCE_NAME, API_PORT_HOST) return True - else: - logging.error("Instance '%s' failed to appear in list after start command.", INSTANCE_NAME) - # Attempt to get logs if possible (might not exist if start failed very early) - log_dir_base = Path.home() / ".apptainer" / "instances" / "logs" - # User might not be available via os.getenv reliably in all contexts, try to find log - # This part is heuristic for finding the log file. 
- user_name = os.getenv("USER", "unknown_user") - specific_log_dir_apptainer = log_dir_base / os.getenv("USER", "") # Apptainer specific - specific_log_dir_singularity = Path.home() / ".singularity" / "instances" / "logs" / os.getenv("USER", "") # Older Singularity - - log_file_apptainer = specific_log_dir_apptainer / f"{INSTANCE_NAME}.log" - log_file_singularity = specific_log_dir_singularity / f"{INSTANCE_NAME}.log" - - actual_log_file = None - if log_file_apptainer.exists(): - actual_log_file = log_file_apptainer - elif log_file_singularity.exists(): - actual_log_file = log_file_singularity - - if actual_log_file: - logging.error("Check instance logs for details: %s", actual_log_file) - try: - with open(actual_log_file, "r") as lf: - log_tail = "".join(list(lf)[-20:]) # last 20 lines - logging.error("Last few log lines:\n%s", log_tail) - except Exception as log_e: - logging.error("Could not read log file: %s", log_e) - else: - logging.error("Instance log file not found in typical locations.") - return False + logging.error("Instance failed to start.") + _log_last_lines() + return False except subprocess.CalledProcessError as e: - logging.error("Failed to start instance (command error): %s", e) - if hasattr(e, 'stderr') and e.stderr: logging.error("Stderr:\n%s", e.stderr.strip()) - if hasattr(e, 'stdout') and e.stdout: logging.error("Stdout:\n%s", e.stdout.strip()) + logging.error("Failed to start instance: %s", e) + if e.stderr: + logging.error("Stderr:\n%s", e.stderr.strip()) + if e.stdout: + logging.error("Stdout:\n%s", e.stdout.strip()) return False except Exception as e: - logging.error("An unexpected error occurred trying to start the instance: %s", e) + logging.error("Unexpected error starting instance: %s", e) return False def stop_instance() -> bool: + """ + Stop the Singularity instance (networked mode). 
+ """ if not instance_running(): logging.info("Instance '%s' not running.", INSTANCE_NAME) return True + logging.info("Stopping instance %s …", INSTANCE_NAME) try: run([SING_BIN, "instance", "stop", INSTANCE_NAME]) - # Verify it's stopped if not instance_running(): - logging.info("Instance '%s' stopped successfully.", INSTANCE_NAME) + logging.info("Instance '%s' stopped.", INSTANCE_NAME) return True - else: - logging.warning("Instance stop command executed, but instance still appears in list. Check manually.") - return False # Or True depending on desired strictness + logging.warning("Stop command executed, but instance still listed.") + return False except subprocess.CalledProcessError as e: logging.error("Failed to stop instance: %s", e) - if hasattr(e, 'stderr') and e.stderr: logging.error("Stderr:\n%s", e.stderr.strip()) - if hasattr(e, 'stdout') and e.stdout: logging.error("Stdout:\n%s", e.stdout.strip()) + if e.stderr: + logging.error("Stderr:\n%s", e.stderr.strip()) + if e.stdout: + logging.error("Stdout:\n%s", e.stdout.strip()) return False except Exception as e: - logging.error("An unexpected error occurred trying to stop the instance: %s", e) + logging.error("Unexpected error stopping instance: %s", e) return False def show_status(): + """ + Display status of the networked instance and existence of SIF. + """ is_running = instance_running() - logging.info("Instance '%s': %s", INSTANCE_NAME, "RUNNING" if is_running else "STOPPED") + status = "RUNNING" if is_running else "STOPPED" + logging.info("Instance '%s' status: %s", INSTANCE_NAME, status) if is_running: - logging.info("API access (host): http://localhost:%d", API_PORT_HOST) + logging.info("API: http://localhost:%d", API_PORT_HOST) else: - logging.info("API port (host - if running): %d", API_PORT_HOST) + logging.info("API port (if running): %d", API_PORT_HOST) + + logging.info("SIF image path: %s", SIF_PATH) + if not SIF_PATH.exists(): + logging.warning("SIF image missing. 
Run 'build' command.") def show_logs(lines: int = 50): - # Determine log directory based on Apptainer/Singularity conventions - # Singularity: $HOME/.singularity/instances/logs//.log - # Apptainer: $HOME/.apptainer/instances/logs//.log - # Prefer Apptainer path if it exists, fallback to Singularity - user_name = os.getenv("USER") - if not user_name: - logging.error("USER environment variable not set, cannot reliably determine log path.") - # Fallback for some systems where USER might not be set in certain execution contexts - try: - import getpass - user_name = getpass.getuser() - except Exception: - logging.error("Could not determine username to find logs.") - return - - log_dir_apptainer = Path.home() / ".apptainer" / "instances" / "logs" / user_name - log_dir_singularity = Path.home() / ".singularity" / "instances" / "logs" / user_name - - log_file_apptainer = log_dir_apptainer / f"{INSTANCE_NAME}.log" - log_file_singularity = log_dir_singularity / f"{INSTANCE_NAME}.log" - - actual_log_file = None - if log_file_apptainer.exists(): - actual_log_file = log_file_apptainer - elif log_file_singularity.exists(): - actual_log_file = log_file_singularity - - if not instance_running() and not actual_log_file : # If not running and no log file, nothing to show - logging.warning("Instance '%s' not running and no log file found.", INSTANCE_NAME) + """ + Tail the last `lines` lines of the instance log (networked mode). 
+ """ + user_name = os.getenv("USER") or getpass.getuser() + apptainer_dir = Path.home() / ".apptainer" / "instances" / "logs" / user_name + singularity_dir = Path.home() / ".singularity" / "instances" / "logs" / user_name + + log_file = None + if (apptainer_dir / f"{INSTANCE_NAME}.log").exists(): + log_file = apptainer_dir / f"{INSTANCE_NAME}.log" + elif (singularity_dir / f"{INSTANCE_NAME}.log").exists(): + log_file = singularity_dir / f"{INSTANCE_NAME}.log" + + if not instance_running() and not log_file: + logging.warning("Instance not running and no log file found.") return - elif not actual_log_file: # Running but somehow no log file yet (or path issue) - logging.warning("Instance '%s' is running, but its log file was not found at expected locations:\n- %s\n- %s", INSTANCE_NAME, log_file_apptainer, log_file_singularity) + if not log_file: + logging.warning( + "Instance is running, but log file not found:\n" + f" - {apptainer_dir}\n - {singularity_dir}" + ) return - elif not instance_running() and actual_log_file: - logging.info("Instance '%s' is not running. Displaying last logs from %s:", INSTANCE_NAME, actual_log_file) + if not instance_running(): + logging.info("Instance '%s' not running. Displaying last logs from %s", INSTANCE_NAME, log_file) - print(f"\n--- Logs for {INSTANCE_NAME} (last {lines} lines from {actual_log_file}) ---") + print(f"\n--- Logs for {INSTANCE_NAME} (last {lines} lines) ---") try: - log_content = run(["tail", "-n", str(lines), str(actual_log_file)], capture=True, check=True).stdout - print(log_content.strip()) + result = run(["tail", "-n", str(lines), str(log_file)], capture=True, check=True) + print(result.stdout.strip()) except subprocess.CalledProcessError as e: - logging.error("Could not read logs using tail: %s", e) - if hasattr(e, 'stderr') and e.stderr: logging.error("Stderr:\n%s", e.stderr.strip()) - except FileNotFoundError: # if tail is not found - logging.error("'tail' command not found. 
Cannot display logs.") + logging.error("Could not read logs: %s", e) + if e.stderr: + logging.error("Stderr:\n%s", e.stderr.strip()) + except FileNotFoundError: + logging.error("'tail' command not found.") print("-------------------------------------") -# --------------------------------------------------------------------------- -# Interactive REPL -# --------------------------------------------------------------------------- + +def _log_last_lines(): + """ + Helper to print the last 20 lines of the instance log if it exists. + """ + user_name = os.getenv("USER") or getpass.getuser() + apptainer_dir = Path.home() / ".apptainer" / "instances" / "logs" / user_name + singularity_dir = Path.home() / ".singularity" / "instances" / "logs" / user_name + + log_file = None + if (apptainer_dir / f"{INSTANCE_NAME}.log").exists(): + log_file = apptainer_dir / f"{INSTANCE_NAME}.log" + elif (singularity_dir / f"{INSTANCE_NAME}.log").exists(): + log_file = singularity_dir / f"{INSTANCE_NAME}.log" + + if log_file: + logging.error("Check instance logs: %s", log_file) + try: + with open(log_file, "r") as lf: + lines = lf.readlines()[-20:] + logging.error("Last 20 log lines:\n%s", "".join(lines)) + except Exception as e: + logging.error("Could not read log file: %s", e) + else: + logging.error("Instance log file not found.") + def repl(): - print("Singularity Sandbox Manager (pulls pre-built SIF). Type 'help'.") + """ + Interactive REPL for building, starting, stopping, status, logs. + """ + print("Singularity Sandbox Manager (Docker-free). 
Type 'help'.") + while True: try: line = input(f"{INSTANCE_NAME}> ").strip() @@ -266,12 +286,12 @@ def repl(): break if not line: continue - + try: - cmd_parts = shlex.split(line) - cmd = cmd_parts[0] - args = cmd_parts[1:] - except ValueError as e: # Handle issue with shlex.split if quotes are mismatched + parts = shlex.split(line) + cmd = parts[0] + args = parts[1:] + except ValueError as e: print(f"Error parsing command: {e}") continue @@ -279,103 +299,84 @@ def repl(): print("Exiting.") break elif cmd == "help": - print("\nAvailable commands:") - print(" build [--rebuild] - Ensure SIF image is downloaded (use --rebuild to force re-download).") - print(" start [--rebuild] - Start the instance (forces SIF re-download if --rebuild is used).") - print(" stop - Stop & remove the instance.") - print(" status - Show instance + port status.") - print(" logs [N] - Tail the instance log (default 50 lines).") - print(" exit | quit | q - Exit the manager.\n") + print( + "\nAvailable commands:\n" + " build [--rebuild] - Download/update SIF (offline + networked).\n" + " start [--rebuild] - Start networked instance (FastAPI).\n" + " stop - Stop the networked instance.\n" + " status - Show SIF & networked instance status.\n" + " logs [N] - Tail last N lines of networked logs.\n" + " exit | quit | q - Exit the manager.\n" + ) elif cmd == "build": - rebuild_flag = "--rebuild" in args - pull_sif_if_needed(force_pull=rebuild_flag) + pull_sif_if_needed(force_pull="--rebuild" in args) elif cmd == "start": - rebuild_flag = "--rebuild" in args - start_instance(rebuild=rebuild_flag) + start_instance(rebuild="--rebuild" in args) elif cmd == "stop": stop_instance() elif cmd == "status": show_status() elif cmd == "logs": - n_lines = 50 + n = 50 if args: try: - n_lines = int(args[0]) + n = int(args[0]) except ValueError: - print("Invalid number of lines. 
Using default 50.") - show_logs(n_lines) + print("Invalid number of lines; using default 50.") + show_logs(n) else: print(f"Unknown command: {cmd}. Type 'help' for available commands.") - - # Attempt to gracefully stop the instance if it's running when REPL exits + if instance_running(): - logging.info("REPL exited, stopping instance '%s' if running...", INSTANCE_NAME) + logging.info("REPL exiting; stopping instance '%s'.", INSTANCE_NAME) stop_instance() -# --------------------------------------------------------------------------- -# CLI -# --------------------------------------------------------------------------- if __name__ == "__main__": - # Configure logging - # Add a timestamp to the logging - logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S") - - - if len(sys.argv) == 1: # No arguments, start REPL - repl() - sys.exit(0) + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)s: %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) parser = argparse.ArgumentParser( - description="Singularity Sandbox Manager. Pulls a pre-built SIF and manages its instance.", - formatter_class=argparse.RawTextHelpFormatter # To preserve help text formatting + description="Singularity Sandbox Manager. Pulls SIF, manages networked instances.", + formatter_class=argparse.RawTextHelpFormatter, ) - subparsers = parser.add_subparsers(dest="cmd", title="commands", required=True) + subparsers = parser.add_subparsers(dest="cmd", title="commands", required=False) - # Build command (now means: ensure SIF is downloaded) - build_parser = subparsers.add_parser("build", help="Ensure SIF image is downloaded from the predefined URL.") + build_parser = subparsers.add_parser("build", help="Download/update the SIF image.") build_parser.add_argument( - "--rebuild", - action="store_true", - help="Force re-download of the SIF image even if it exists." 
+ "--rebuild", action="store_true", help="Force re-download of the SIF." ) - # Start command - start_parser = subparsers.add_parser("start", help="Start the Singularity instance.") + start_parser = subparsers.add_parser("start", help="Start the networked Singularity instance.") start_parser.add_argument( - "--rebuild", - action="store_true", - help="Force re-download of the SIF image before starting." + "--rebuild", action="store_true", help="Force re-download of the SIF first." ) - # Stop command - stop_parser = subparsers.add_parser("stop", help="Stop and remove the Singularity instance.") + subparsers.add_parser("stop", help="Stop the networked Singularity instance.") + subparsers.add_parser("status", help="Show SIF & instance status.") - # Status command - status_parser = subparsers.add_parser("status", help="Show current status of the instance and API port.") - - # Logs command - logs_parser = subparsers.add_parser("logs", help="Show logs from the running instance.") - logs_parser.add_argument( - "n", - nargs="?", - type=int, - default=50, - help="Number of log lines to display (default: 50)." 
- ) + logs_parser = subparsers.add_parser("logs", help="Tail the instance log.") + logs_parser.add_argument("n", nargs="?", type=int, default=50, help="Number of lines (default: 50).") args = parser.parse_args() - operation_successful = True # Assume success unless a command returns False + if not args.cmd: + repl() + sys.exit(0) + + success = True if args.cmd == "build": - operation_successful = pull_sif_if_needed(force_pull=args.rebuild) + success = pull_sif_if_needed(force_pull=args.rebuild) elif args.cmd == "start": - operation_successful = start_instance(rebuild=args.rebuild) + success = start_instance(rebuild=args.rebuild) elif args.cmd == "stop": - operation_successful = stop_instance() + success = stop_instance() elif args.cmd == "status": - show_status() # Status typically doesn't fail in a way that sets operation_successful + show_status() elif args.cmd == "logs": - show_logs(args.n) # Logs display also doesn't typically set operation_successful + show_logs(args.n) - sys.exit(0 if operation_successful else 1) \ No newline at end of file + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/benchmarking/sandbox/offline_kernel.py b/benchmarking/sandbox/offline_kernel.py new file mode 100644 index 0000000..778582e --- /dev/null +++ b/benchmarking/sandbox/offline_kernel.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python3 +""" +Offline Kernel (state‑preserving REPL or single‑shot) +==================================================== +• **REPL mode** : `python /opt/offline_kernel.py --repl` + ‑ Parent writes a code chunk to stdin, terminates with the sentinel line + `<<>>`. Kernel executes it in a persistent global namespace, then + prints **one** JSON line. + ‑ Variables (e.g. `adata`) remain available in subsequent chunks. + +• **Single‑shot**: `python /opt/offline_kernel.py ` + ‑ Runs the file in a fresh namespace and exits (legacy behaviour). 
+
+Returned JSON schema
+--------------------
+{
+  "status" : "ok" | "error" | "timeout",
+  "stdout" : "captured standard output",
+  "stderr" : "captured errors / traceback",
+  "images" : [ "base64_png", ... ]  # Any matplotlib figures
+}
+"""
+from __future__ import annotations
+
+import base64
+import io
+import json
+import os
+import sys
+import traceback
+from contextlib import redirect_stdout, redirect_stderr
+from pathlib import Path
+from typing import List, Dict
+
+# force to save to /tmp, so that the kernel can be run in a container
+os.environ.setdefault("MPLCONFIGDIR", "/tmp/.matplotlib")
+os.environ.setdefault("NUMBA_CACHE_DIR", "/tmp/.numba_cache")
+os.environ.setdefault("XDG_CONFIG_HOME", "/tmp/.config")
+os.environ.setdefault("XDG_CACHE_HOME", "/tmp/.cache")
+os.environ.setdefault("CELLTYPIST_HOME", "/tmp/.celltypist")
+os.environ.setdefault("CELLTYPIST_FOLDER", "/tmp/.celltypist_cache")
+os.environ.setdefault("TRANSFORMERS_CACHE", "/tmp/.transformers")
+
+SENTINEL = "<<>>"  # Delimits code blocks in REPL mode
+GLOBAL_NS: Dict = {"__builtins__": __builtins__}  # Persistent namespace
+
+# ---------------------------------------------------------------------------
+# Core execution helper
+# ---------------------------------------------------------------------------
+
+def _run(code: str, ns: Dict) -> Dict:
+    """Execute *code* in *ns* and capture stdout / stderr / matplotlib figs."""
+    out, err = io.StringIO(), io.StringIO()
+    images: List[str] = []
+    status = "ok"
+
+    try:
+        with redirect_stdout(out), redirect_stderr(err):
+            # Set up Matplotlib (optional)
+            try:
+                import matplotlib
+                matplotlib.use("Agg")
+                import matplotlib.pyplot as plt  # noqa: F401
+                ns["plt"] = plt
+            except ImportError:
+                pass  # Matplotlib not installed; fine unless user imports it
+
+            exec(compile(code, "<string>", "exec"), ns)
+
+            if "plt" in ns:
+                for fid in ns["plt"].get_fignums():
+                    fig = ns["plt"].figure(fid)
+                    buf = io.BytesIO()
+                    fig.savefig(buf, format="png", bbox_inches="tight")
+                    images.append(base64.b64encode(buf.getvalue()).decode())
+                    ns["plt"].close(fig)
+    except Exception:
+        err.write(traceback.format_exc())
+        status = "error"
+    finally:
+        # Make sure no phantom figures linger
+        if "plt" in ns:
+            try:
+                ns["plt"].close("all")
+            except Exception:
+                pass
+
+    return {
+        "status": status,
+        "stdout": out.getvalue(),
+        "stderr": err.getvalue(),
+        "images": images,
+    }
+
+# ---------------------------------------------------------------------------
+# REPL mode implementation
+# ---------------------------------------------------------------------------
+
+def _repl() -> None:
+    """Persistent loop: read code chunks, exec, print JSON."""
+    sys.stdout.write("__REPL_READY__\n")
+    sys.stdout.flush()
+
+    buffer: List[str] = []
+    for line in sys.stdin:
+        if line.rstrip("\n") == SENTINEL:
+            code_block = "".join(buffer)
+            buffer.clear()
+            result = _run(code_block, GLOBAL_NS)
+            sys.stdout.write(json.dumps(result) + "\n")
+            sys.stdout.flush()
+        else:
+            buffer.append(line)
+
+# ---------------------------------------------------------------------------
+# Single‑shot helper (legacy)
+# ---------------------------------------------------------------------------
+
+def _single_shot(file_path: Path):
+    if not file_path.is_file():
+        print(json.dumps({
+            "status": "error",
+            "stdout": "",
+            "stderr": f"File not found: {file_path}",
+            "images": []
+        }))
+        sys.exit(3)
+    try:
+        code = file_path.read_text("utf-8")
+    except Exception as e:
+        print(json.dumps({
+            "status": "error",
+            "stdout": "",
+            "stderr": f"Error reading {file_path}: {e}",
+            "images": []
+        }))
+        sys.exit(4)
+
+    print(json.dumps(_run(code, GLOBAL_NS)))
+
+# ---------------------------------------------------------------------------
+# Entry‑point
+# ---------------------------------------------------------------------------
+if __name__ == "__main__":
+    if len(sys.argv) == 2 and sys.argv[1] == "--repl":
+        _repl()
+        sys.exit(0)
+
+    if len(sys.argv) != 2:
+        print(
+            "Usage: offline_kernel.py <script.py>  OR  offline_kernel.py --repl",
+            file=sys.stderr,
+        )
+        sys.exit(2)
+
+    _single_shot(Path(sys.argv[1]))
diff --git a/benchmarking/sandbox/requirements.txt b/benchmarking/sandbox/requirements.txt
index 956c953..bdcad1e 100644
--- a/benchmarking/sandbox/requirements.txt
+++ b/benchmarking/sandbox/requirements.txt
@@ -24,4 +24,5 @@ psutil==6.0.0
 defusedxml==0.7.1
 requests==2.32.3
 jupyter
-jupyter_client
\ No newline at end of file
+jupyter_client
+matplotlib
\ No newline at end of file