diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..a9fe88f6 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,17 @@ +root = true + +[*] +insert_final_newline = true +end_of_line = lf + +[*.{ts,tsx,json}] +indent_style = space +indent_size = 2 + +[*.md] +indent_style = space +indent_size = 2 + +[*.{html,css}] +indent_style = space +indent_size = 4 \ No newline at end of file diff --git a/.github/workflows/build-test-publish.yml b/.github/workflows/build-test-publish.yml new file mode 100644 index 00000000..cbdaff42 --- /dev/null +++ b/.github/workflows/build-test-publish.yml @@ -0,0 +1,59 @@ +name: Build, Test, Publish + +on: [push] + +jobs: + test-publish: + name: Build, test, and publish package + runs-on: ubuntu-latest + permissions: + contents: read + timeout-minutes: 10 + steps: + - uses: actions/checkout@v6 + - run: corepack enable + - uses: actions/setup-node@v6 + with: + node-version: 24 + cache: yarn + + - run: yarn install --immutable + + - run: yarn test:coverage + - run: yarn build + - run: yarn test:build + + - name: Test in browser (If UMD tests are failing, check torch-bridge.js) + run: | + yarn serve & + npx wait-on tcp:8080 + npx mocha-headless-chrome -a no-sandbox -t 60000 -f http://127.0.0.1:8080/test/index.html + npx mocha-headless-chrome -a no-sandbox -t 60000 -f http://127.0.0.1:8080/test/umd.html + + - name: Build docs and prepare directory + run: | + yarn docs + mkdir -p docs/media + [ -d "build" ] && cp -r build docs/build + [ -d "build" ] && cp -r build docs/media/build + [ -d "examples" ] && cp -r examples docs/examples + [ -d "examples" ] && cp -r examples docs/media/examples + + - uses: actions/upload-pages-artifact@v4 + with: + path: './docs' + + - run: yarn pkg-pr-new publish + + deploy-docs: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + permissions: + pages: write + id-token: write + needs: test-publish + steps: + - uses: actions/deploy-pages@v5 + 
id: deployment diff --git a/.github/workflows/pyodide-test.yml b/.github/workflows/pyodide-test.yml new file mode 100644 index 00000000..99cf865f --- /dev/null +++ b/.github/workflows/pyodide-test.yml @@ -0,0 +1,36 @@ +name: Pyodide Test + +on: [push] + +jobs: + pyodide-test: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v6 + - run: corepack enable + - uses: actions/setup-node@v6 + with: + node-version: 24 + cache: yarn + + - run: yarn install --immutable + + - run: yarn build + + - name: Install Pyodide example dependencies + run: cd examples/pyodide && yarn install --immutable + + - uses: actions/setup-python@v6 + with: + python-version: '3.14' + cache: 'pip' + cache-dependency-path: scripts/requirements.txt + + - run: pip install -r scripts/requirements.txt + + - name: Generate PyTorch output + run: cd examples/pyodide && yarn gen-cache + + - name: Run Pyodide Tests + run: cd examples/pyodide && yarn test-pyodide diff --git a/.github/workflows/verify-generated-tests.yml b/.github/workflows/verify-generated-tests.yml new file mode 100644 index 00000000..459bb779 --- /dev/null +++ b/.github/workflows/verify-generated-tests.yml @@ -0,0 +1,37 @@ +name: Verify Generated Tests + +on: [push] + +jobs: + verify-sync: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v6 + - run: corepack enable + - uses: actions/setup-node@v6 + with: + node-version: 24 + cache: yarn + + - run: yarn install --immutable + + - uses: actions/setup-python@v6 + with: + python-version: '3.14' + cache: 'pip' + cache-dependency-path: scripts/requirements.txt + + - run: pip install -r scripts/requirements.txt + + - name: Sanity Check Generated File Length + run: | + COMMITTED_LINES=$(wc -l < test/testcases.gen.js) + yarn update-tests + GENERATED_LINES=$(wc -l < test/testcases.gen.js) + if [ "$COMMITTED_LINES" -ne "$GENERATED_LINES" ]; then + echo "❌ Line count mismatch! Committed: $COMMITTED_LINES, Generated: $GENERATED_LINES." 
+ echo "Please run 'yarn update-tests' locally and commit the updated test file." + exit 1 + fi + echo "✅ Line counts match ($COMMITTED_LINES lines). Sanity check passed." diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..140c4045 --- /dev/null +++ b/.gitignore @@ -0,0 +1,61 @@ +# See https://help.github.com/ignore-files/ for more about ignoring files. +/docs + +.direnv +.envrc +/shell.nix +.idea +/test.js +/test.py + +# dependencies +/node_modules + +# testing +/coverage + +# production +/build +src/styles/*.css +/.env + +# deployment +terraform* + +# sourceror bundle +/public/externalLibs/sourceror + +# misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local +.idea/ + +npm-debug.log* +.eslintcache +yarn-error.log + +# emacs backup files + +*~ + +# yarn files + +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/sdks +!.yarn/versions + +# Swap the comments on the following lines if you wish to use zero-installs +# In that case, don't forget to run `yarn config set enableGlobalCache false`! 
+# Documentation here: https://yarnpkg.com/features/caching#zero-installs + +#!.yarn/cache +.pnp.* + +# We use .node-version +/.tool-versions diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..00f0dc88 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,8 @@ +{ + "semi": true, + "singleQuote": true, + "printWidth": 100, + "arrowParens": "avoid", + "trailingComma": "none", + "endOfLine": "auto" +} diff --git a/.yarn/sdks/eslint/bin/eslint.js b/.yarn/sdks/eslint/bin/eslint.js new file mode 100755 index 00000000..e6604ff5 --- /dev/null +++ b/.yarn/sdks/eslint/bin/eslint.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/bin/eslint.js + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/bin/eslint.js your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/bin/eslint.js`)); diff --git a/.yarn/sdks/eslint/lib/api.js b/.yarn/sdks/eslint/lib/api.js new file mode 100644 index 00000000..8addf97f --- /dev/null +++ b/.yarn/sdks/eslint/lib/api.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint`)); diff --git a/.yarn/sdks/eslint/lib/config-api.js b/.yarn/sdks/eslint/lib/config-api.js new file mode 100644 index 00000000..e84435de --- /dev/null +++ b/.yarn/sdks/eslint/lib/config-api.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/config + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/config your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/config`)); diff --git a/.yarn/sdks/eslint/lib/types/config-api.d.ts b/.yarn/sdks/eslint/lib/types/config-api.d.ts new file mode 100644 index 00000000..174070b0 --- /dev/null +++ b/.yarn/sdks/eslint/lib/types/config-api.d.ts @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/config + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/config your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/config`)); diff --git a/.yarn/sdks/eslint/lib/types/index.d.ts b/.yarn/sdks/eslint/lib/types/index.d.ts new file mode 100644 index 00000000..19293d02 --- /dev/null +++ b/.yarn/sdks/eslint/lib/types/index.d.ts @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint`)); diff --git a/.yarn/sdks/eslint/lib/types/rules.d.ts b/.yarn/sdks/eslint/lib/types/rules.d.ts new file mode 100644 index 00000000..8d79c4cc --- /dev/null +++ b/.yarn/sdks/eslint/lib/types/rules.d.ts @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/rules + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/rules your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/rules`)); diff --git a/.yarn/sdks/eslint/lib/types/universal.d.ts b/.yarn/sdks/eslint/lib/types/universal.d.ts new file mode 100644 index 00000000..662b3f4f --- /dev/null +++ b/.yarn/sdks/eslint/lib/types/universal.d.ts @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/universal + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/universal your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/universal`)); diff --git a/.yarn/sdks/eslint/lib/types/use-at-your-own-risk.d.ts b/.yarn/sdks/eslint/lib/types/use-at-your-own-risk.d.ts new file mode 100644 index 00000000..2e2ccca2 --- /dev/null +++ b/.yarn/sdks/eslint/lib/types/use-at-your-own-risk.d.ts @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/use-at-your-own-risk + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/use-at-your-own-risk your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/use-at-your-own-risk`)); diff --git a/.yarn/sdks/eslint/lib/universal.js b/.yarn/sdks/eslint/lib/universal.js new file mode 100644 index 00000000..85a8ccbc --- /dev/null +++ b/.yarn/sdks/eslint/lib/universal.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/universal + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/universal your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/universal`)); diff --git a/.yarn/sdks/eslint/lib/unsupported-api.js b/.yarn/sdks/eslint/lib/unsupported-api.js new file mode 100644 index 00000000..c2b464ce --- /dev/null +++ b/.yarn/sdks/eslint/lib/unsupported-api.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/use-at-your-own-risk + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real eslint/use-at-your-own-risk your application uses +module.exports = wrapWithUserWrapper(absRequire(`eslint/use-at-your-own-risk`)); diff --git a/.yarn/sdks/eslint/package.json b/.yarn/sdks/eslint/package.json new file mode 100644 index 00000000..840be3b6 --- /dev/null +++ b/.yarn/sdks/eslint/package.json @@ -0,0 +1,31 @@ +{ + "name": "eslint", + "version": "9.37.0-sdk", + "main": "./lib/api.js", + "type": "commonjs", + "bin": { + "eslint": "./bin/eslint.js" + }, + "exports": { + ".": { + "types": "./lib/types/index.d.ts", + "default": "./lib/api.js" + }, + "./config": { + "types": "./lib/types/config-api.d.ts", + "default": "./lib/config-api.js" + }, + "./package.json": "./package.json", + "./use-at-your-own-risk": { + "types": "./lib/types/use-at-your-own-risk.d.ts", + "default": "./lib/unsupported-api.js" + }, + "./rules": { + "types": "./lib/types/rules.d.ts" + }, + "./universal": { + "types": "./lib/types/universal.d.ts", + "default": "./lib/universal.js" + } + } +} diff --git a/.yarn/sdks/integrations.yml b/.yarn/sdks/integrations.yml new file mode 100644 index 00000000..aa9d0d0a --- /dev/null +++ b/.yarn/sdks/integrations.yml @@ -0,0 +1,5 @@ +# This file is automatically generated by @yarnpkg/sdks. +# Manual changes might be lost! 
+ +integrations: + - vscode diff --git a/.yarn/sdks/prettier/bin/prettier.cjs b/.yarn/sdks/prettier/bin/prettier.cjs new file mode 100755 index 00000000..9a4098f7 --- /dev/null +++ b/.yarn/sdks/prettier/bin/prettier.cjs @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require prettier/bin/prettier.cjs + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real prettier/bin/prettier.cjs your application uses +module.exports = wrapWithUserWrapper(absRequire(`prettier/bin/prettier.cjs`)); diff --git a/.yarn/sdks/prettier/index.cjs b/.yarn/sdks/prettier/index.cjs new file mode 100644 index 00000000..57cb2ab1 --- /dev/null +++ b/.yarn/sdks/prettier/index.cjs @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require prettier + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real prettier your application uses +module.exports = wrapWithUserWrapper(absRequire(`prettier`)); diff --git a/.yarn/sdks/prettier/package.json b/.yarn/sdks/prettier/package.json new file mode 100644 index 00000000..1488e98c --- /dev/null +++ b/.yarn/sdks/prettier/package.json @@ -0,0 +1,7 @@ +{ + "name": "prettier", + "version": "3.6.2-sdk", + "main": "./index.cjs", + "type": "commonjs", + "bin": "./bin/prettier.cjs" +} diff --git a/.yarn/sdks/typescript/bin/tsc b/.yarn/sdks/typescript/bin/tsc new file mode 100755 index 00000000..867a7bdf --- /dev/null +++ b/.yarn/sdks/typescript/bin/tsc @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require typescript/bin/tsc + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real typescript/bin/tsc your application uses +module.exports = wrapWithUserWrapper(absRequire(`typescript/bin/tsc`)); diff --git a/.yarn/sdks/typescript/bin/tsserver b/.yarn/sdks/typescript/bin/tsserver new file mode 100755 index 00000000..3fc5aa31 --- /dev/null +++ b/.yarn/sdks/typescript/bin/tsserver @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require typescript/bin/tsserver + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real typescript/bin/tsserver your application uses +module.exports = wrapWithUserWrapper(absRequire(`typescript/bin/tsserver`)); diff --git a/.yarn/sdks/typescript/lib/tsc.js b/.yarn/sdks/typescript/lib/tsc.js new file mode 100644 index 00000000..da411bdb --- /dev/null +++ b/.yarn/sdks/typescript/lib/tsc.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require typescript/lib/tsc.js + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real typescript/lib/tsc.js your application uses +module.exports = wrapWithUserWrapper(absRequire(`typescript/lib/tsc.js`)); diff --git a/.yarn/sdks/typescript/lib/tsserver.js b/.yarn/sdks/typescript/lib/tsserver.js new file mode 100644 index 00000000..6249c467 --- /dev/null +++ b/.yarn/sdks/typescript/lib/tsserver.js @@ -0,0 +1,248 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require typescript/lib/tsserver.js + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +const moduleWrapper = exports => { + return wrapWithUserWrapper(moduleWrapperFn(exports)); +}; + +const moduleWrapperFn = tsserver => { + if (!process.versions.pnp) { + return tsserver; + } + + const {isAbsolute} = require(`path`); + const pnpApi = require(`pnpapi`); + + const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); + const isPortal = str => str.startsWith("portal:/"); + const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); + + const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { + return `${locator.name}@${locator.reference}`; + })); + + // VSCode sends the zip paths to TS using the "zip://" prefix, that TS + // doesn't understand. This layer makes sure to remove the protocol + // before forwarding it to TS, and to add it back on all returned paths. + + function toEditorPath(str) { + // We add the `zip:` prefix to both `.zip/` paths and virtual paths + if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) { + // We also take the opportunity to turn virtual paths into physical ones; + // this makes it much easier to work with workspaces that list peer + // dependencies, since otherwise Ctrl+Click would bring us to the virtual + // file instances instead of the real ones. + // + // We only do this to modules owned by the the dependency tree roots. + // This avoids breaking the resolution when jumping inside a vendor + // with peer dep (otherwise jumping into react-dom would show resolution + // errors on react). + // + const resolved = isVirtual(str) ? 
pnpApi.resolveVirtual(str) : str; + if (resolved) { + const locator = pnpApi.findPackageLocator(resolved); + if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) { + str = resolved; + } + } + + str = normalize(str); + + if (str.match(/\.zip\//)) { + switch (hostInfo) { + // Absolute VSCode `Uri.fsPath`s need to start with a slash. + // VSCode only adds it automatically for supported schemes, + // so we have to do it manually for the `zip` scheme. + // The path needs to start with a caret otherwise VSCode doesn't handle the protocol + // + // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 + // + // 2021-10-08: VSCode changed the format in 1.61. + // Before | ^zip:/c:/foo/bar.zip/package.json + // After | ^/zip//c:/foo/bar.zip/package.json + // + // 2022-04-06: VSCode changed the format in 1.66. + // Before | ^/zip//c:/foo/bar.zip/package.json + // After | ^/zip/c:/foo/bar.zip/package.json + // + // 2022-05-06: VSCode changed the format in 1.68 + // Before | ^/zip/c:/foo/bar.zip/package.json + // After | ^/zip//c:/foo/bar.zip/package.json + // + case `vscode <1.61`: { + str = `^zip:${str}`; + } break; + + case `vscode <1.66`: { + str = `^/zip/${str}`; + } break; + + case `vscode <1.68`: { + str = `^/zip${str}`; + } break; + + case `vscode`: { + str = `^/zip/${str}`; + } break; + + // To make "go to definition" work, + // We have to resolve the actual file system path from virtual path + // and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) + case `coc-nvim`: { + str = normalize(resolved).replace(/\.zip\//, `.zip::`); + str = resolve(`zipfile:${str}`); + } break; + + // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) + // We have to resolve the actual file system path from virtual path, + // everything else is up to neovim + case `neovim`: { + str = 
normalize(resolved).replace(/\.zip\//, `.zip::`); + str = `zipfile://${str}`; + } break; + + default: { + str = `zip:${str}`; + } break; + } + } else { + str = str.replace(/^\/?/, process.platform === `win32` ? `` : `/`); + } + } + + return str; + } + + function fromEditorPath(str) { + switch (hostInfo) { + case `coc-nvim`: { + str = str.replace(/\.zip::/, `.zip/`); + // The path for coc-nvim is in format of //zipfile://.yarn/... + // So in order to convert it back, we use .* to match all the thing + // before `zipfile:` + return process.platform === `win32` + ? str.replace(/^.*zipfile:\//, ``) + : str.replace(/^.*zipfile:/, ``); + } break; + + case `neovim`: { + str = str.replace(/\.zip::/, `.zip/`); + // The path for neovim is in format of zipfile:////.yarn/... + return str.replace(/^zipfile:\/\//, ``); + } break; + + case `vscode`: + default: { + return str.replace(/^\^?(zip:|\/zip(\/ts-nul-authority)?)\/+/, process.platform === `win32` ? `` : `/`) + } break; + } + } + + // Force enable 'allowLocalPluginLoads' + // TypeScript tries to resolve plugins using a path relative to itself + // which doesn't work when using the global cache + // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 + // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but + // TypeScript already does local loads and if this code is running the user trusts the workspace + // https://github.com/microsoft/vscode/issues/45856 + const ConfiguredProject = tsserver.server.ConfiguredProject; + const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; + ConfiguredProject.prototype.enablePluginsWithOptions = function() { + this.projectService.allowLocalPluginLoads = true; + return originalEnablePluginsWithOptions.apply(this, arguments); + }; + + // And here is the point where we hijack the VSCode <-> TS communications + // by adding ourselves in the middle. 
We locate everything that looks + // like an absolute path of ours and normalize it. + + const Session = tsserver.server.Session; + const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; + let hostInfo = `unknown`; + + Object.assign(Session.prototype, { + onMessage(/** @type {string | object} */ message) { + const isStringMessage = typeof message === 'string'; + const parsedMessage = isStringMessage ? JSON.parse(message) : message; + + if ( + parsedMessage != null && + typeof parsedMessage === `object` && + parsedMessage.arguments && + typeof parsedMessage.arguments.hostInfo === `string` + ) { + hostInfo = parsedMessage.arguments.hostInfo; + if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) { + const [, major, minor] = (process.env.VSCODE_IPC_HOOK.match( + // The RegExp from https://semver.org/ but without the caret at the start + /(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/ + ) ?? []).map(Number) + + if (major === 1) { + if (minor < 61) { + hostInfo += ` <1.61`; + } else if (minor < 66) { + hostInfo += ` <1.66`; + } else if (minor < 68) { + hostInfo += ` <1.68`; + } + } + } + } + + const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => { + return typeof value === 'string' ? fromEditorPath(value) : value; + }); + + return originalOnMessage.call( + this, + isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON) + ); + }, + + send(/** @type {any} */ msg) { + return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { + return typeof value === `string` ? toEditorPath(value) : value; + }))); + } + }); + + return tsserver; +}; + +const [major, minor] = absRequire(`typescript/package.json`).version.split(`.`, 2).map(value => parseInt(value, 10)); +// In TypeScript@>=5.5 the tsserver uses the public TypeScript API so that needs to be patched as well. 
+// Ref https://github.com/microsoft/TypeScript/pull/55326 +if (major > 5 || (major === 5 && minor >= 5)) { + moduleWrapper(absRequire(`typescript`)); +} + +// Defer to the real typescript/lib/tsserver.js your application uses +module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`)); diff --git a/.yarn/sdks/typescript/lib/tsserverlibrary.js b/.yarn/sdks/typescript/lib/tsserverlibrary.js new file mode 100644 index 00000000..0e50e0a2 --- /dev/null +++ b/.yarn/sdks/typescript/lib/tsserverlibrary.js @@ -0,0 +1,248 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require typescript/lib/tsserverlibrary.js + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +const moduleWrapper = exports => { + return wrapWithUserWrapper(moduleWrapperFn(exports)); +}; + +const moduleWrapperFn = tsserver => { + if (!process.versions.pnp) { + return tsserver; + } + + const {isAbsolute} = require(`path`); + const pnpApi = require(`pnpapi`); + + const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); + const isPortal = str => str.startsWith("portal:/"); + const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); + + const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { + return `${locator.name}@${locator.reference}`; + })); + + // VSCode sends the zip paths to TS using the "zip://" prefix, that TS + // doesn't understand. This layer makes sure to remove the protocol + // before forwarding it to TS, and to add it back on all returned paths. + + function toEditorPath(str) { + // We add the `zip:` prefix to both `.zip/` paths and virtual paths + if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) { + // We also take the opportunity to turn virtual paths into physical ones; + // this makes it much easier to work with workspaces that list peer + // dependencies, since otherwise Ctrl+Click would bring us to the virtual + // file instances instead of the real ones. + // + // We only do this to modules owned by the the dependency tree roots. + // This avoids breaking the resolution when jumping inside a vendor + // with peer dep (otherwise jumping into react-dom would show resolution + // errors on react). + // + const resolved = isVirtual(str) ? 
pnpApi.resolveVirtual(str) : str; + if (resolved) { + const locator = pnpApi.findPackageLocator(resolved); + if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) { + str = resolved; + } + } + + str = normalize(str); + + if (str.match(/\.zip\//)) { + switch (hostInfo) { + // Absolute VSCode `Uri.fsPath`s need to start with a slash. + // VSCode only adds it automatically for supported schemes, + // so we have to do it manually for the `zip` scheme. + // The path needs to start with a caret otherwise VSCode doesn't handle the protocol + // + // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 + // + // 2021-10-08: VSCode changed the format in 1.61. + // Before | ^zip:/c:/foo/bar.zip/package.json + // After | ^/zip//c:/foo/bar.zip/package.json + // + // 2022-04-06: VSCode changed the format in 1.66. + // Before | ^/zip//c:/foo/bar.zip/package.json + // After | ^/zip/c:/foo/bar.zip/package.json + // + // 2022-05-06: VSCode changed the format in 1.68 + // Before | ^/zip/c:/foo/bar.zip/package.json + // After | ^/zip//c:/foo/bar.zip/package.json + // + case `vscode <1.61`: { + str = `^zip:${str}`; + } break; + + case `vscode <1.66`: { + str = `^/zip/${str}`; + } break; + + case `vscode <1.68`: { + str = `^/zip${str}`; + } break; + + case `vscode`: { + str = `^/zip/${str}`; + } break; + + // To make "go to definition" work, + // We have to resolve the actual file system path from virtual path + // and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) + case `coc-nvim`: { + str = normalize(resolved).replace(/\.zip\//, `.zip::`); + str = resolve(`zipfile:${str}`); + } break; + + // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) + // We have to resolve the actual file system path from virtual path, + // everything else is up to neovim + case `neovim`: { + str = 
normalize(resolved).replace(/\.zip\//, `.zip::`); + str = `zipfile://${str}`; + } break; + + default: { + str = `zip:${str}`; + } break; + } + } else { + str = str.replace(/^\/?/, process.platform === `win32` ? `` : `/`); + } + } + + return str; + } + + function fromEditorPath(str) { + switch (hostInfo) { + case `coc-nvim`: { + str = str.replace(/\.zip::/, `.zip/`); + // The path for coc-nvim is in format of //zipfile://.yarn/... + // So in order to convert it back, we use .* to match all the thing + // before `zipfile:` + return process.platform === `win32` + ? str.replace(/^.*zipfile:\//, ``) + : str.replace(/^.*zipfile:/, ``); + } break; + + case `neovim`: { + str = str.replace(/\.zip::/, `.zip/`); + // The path for neovim is in format of zipfile:////.yarn/... + return str.replace(/^zipfile:\/\//, ``); + } break; + + case `vscode`: + default: { + return str.replace(/^\^?(zip:|\/zip(\/ts-nul-authority)?)\/+/, process.platform === `win32` ? `` : `/`) + } break; + } + } + + // Force enable 'allowLocalPluginLoads' + // TypeScript tries to resolve plugins using a path relative to itself + // which doesn't work when using the global cache + // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 + // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but + // TypeScript already does local loads and if this code is running the user trusts the workspace + // https://github.com/microsoft/vscode/issues/45856 + const ConfiguredProject = tsserver.server.ConfiguredProject; + const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; + ConfiguredProject.prototype.enablePluginsWithOptions = function() { + this.projectService.allowLocalPluginLoads = true; + return originalEnablePluginsWithOptions.apply(this, arguments); + }; + + // And here is the point where we hijack the VSCode <-> TS communications + // by adding ourselves in the middle. 
We locate everything that looks + // like an absolute path of ours and normalize it. + + const Session = tsserver.server.Session; + const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; + let hostInfo = `unknown`; + + Object.assign(Session.prototype, { + onMessage(/** @type {string | object} */ message) { + const isStringMessage = typeof message === 'string'; + const parsedMessage = isStringMessage ? JSON.parse(message) : message; + + if ( + parsedMessage != null && + typeof parsedMessage === `object` && + parsedMessage.arguments && + typeof parsedMessage.arguments.hostInfo === `string` + ) { + hostInfo = parsedMessage.arguments.hostInfo; + if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) { + const [, major, minor] = (process.env.VSCODE_IPC_HOOK.match( + // The RegExp from https://semver.org/ but without the caret at the start + /(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/ + ) ?? []).map(Number) + + if (major === 1) { + if (minor < 61) { + hostInfo += ` <1.61`; + } else if (minor < 66) { + hostInfo += ` <1.66`; + } else if (minor < 68) { + hostInfo += ` <1.68`; + } + } + } + } + + const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => { + return typeof value === 'string' ? fromEditorPath(value) : value; + }); + + return originalOnMessage.call( + this, + isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON) + ); + }, + + send(/** @type {any} */ msg) { + return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { + return typeof value === `string` ? toEditorPath(value) : value; + }))); + } + }); + + return tsserver; +}; + +const [major, minor] = absRequire(`typescript/package.json`).version.split(`.`, 2).map(value => parseInt(value, 10)); +// In TypeScript@>=5.5 the tsserver uses the public TypeScript API so that needs to be patched as well. 
+// Ref https://github.com/microsoft/TypeScript/pull/55326 +if (major > 5 || (major === 5 && minor >= 5)) { + moduleWrapper(absRequire(`typescript`)); +} + +// Defer to the real typescript/lib/tsserverlibrary.js your application uses +module.exports = moduleWrapper(absRequire(`typescript/lib/tsserverlibrary.js`)); diff --git a/.yarn/sdks/typescript/lib/typescript.js b/.yarn/sdks/typescript/lib/typescript.js new file mode 100644 index 00000000..7b6cc220 --- /dev/null +++ b/.yarn/sdks/typescript/lib/typescript.js @@ -0,0 +1,32 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, register} = require(`module`); +const {resolve} = require(`path`); +const {pathToFileURL} = require(`url`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absUserWrapperPath = resolve(__dirname, `./sdk.user.cjs`); +const absRequire = createRequire(absPnpApiPath); + +const absPnpLoaderPath = resolve(absPnpApiPath, `../.pnp.loader.mjs`); +const isPnpLoaderEnabled = existsSync(absPnpLoaderPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require typescript + require(absPnpApiPath).setup(); + if (isPnpLoaderEnabled && register) { + register(pathToFileURL(absPnpLoaderPath)); + } + } +} + +const wrapWithUserWrapper = existsSync(absUserWrapperPath) + ? 
exports => absRequire(absUserWrapperPath)(exports) + : exports => exports; + +// Defer to the real typescript your application uses +module.exports = wrapWithUserWrapper(absRequire(`typescript`)); diff --git a/.yarn/sdks/typescript/package.json b/.yarn/sdks/typescript/package.json new file mode 100644 index 00000000..19a0b6f1 --- /dev/null +++ b/.yarn/sdks/typescript/package.json @@ -0,0 +1,10 @@ +{ + "name": "typescript", + "version": "5.9.3-sdk", + "main": "./lib/typescript.js", + "type": "commonjs", + "bin": { + "tsc": "./bin/tsc", + "tsserver": "./bin/tsserver" + } +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..cf153fe9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,40 @@ +## Codebase Structure + +- [`src`](src) + - [`index.ts`](src/index.ts) is the entry point of the library. + - [`tensor.ts`](src/tensor.ts) is the main tensor class. + - [`function`](src/function) contains all functions that tensors can perform. + - [`nn`](src/nn) contains all neural network modules (for everything under `torch.nn`). + - [`optim`](src/optim) contains all optimizers (for everything under `torch.optim`). + - [`creation`](src/creation) contains all tensor creation functions (all functions that create a tensor not from scratch, including `zeros`, `randn`). +- [`examples`](examples) contains example usages of the library, including on node, on the browser, and using pyodide on the browser. +- [`test`](test) contains the test cases of the library, including on node and on the browser. See [Testing](#testing). + +### Development Scripts + +You can use `yarn watch` to automatically test after each edit. + +### Adding a new Function + +To add a new function, add it to [`src/functions/ops.ts`](src/functions/ops.ts). + +- To allow for `torch.<fn>(<tensor>, <args>)`, add it as well to [`src/functions/functional.ts`](src/functions/functional.ts). +- To allow for `<tensor>.<fn>()`, add it as well to [`src/tensor.ts`](src/tensor.ts) as a `tensor` method.
+ +## Testing + +Tests are run using `mocha`. + +- Node: To test on node, run `yarn test`. +- Browser: To test on browser, run `yarn serve` (after `yarn build` if necessary) and navigate to http://localhost:8080/test/. + +To create a new test: + +1. Create a new `.test.js`/`.test.ts` file in [`test`](test) or write your new test in one of the existing files. +2. If you created a new file and would like to test it on the browser, add it to [`test/index.html`](test/index.html). + +## Documentation + +To see docs, run `yarn docs` to build, and run `yarn serve docs` to serve docs on http://localhost:8080/. + +To ensure familiarity with PyTorch, docs should be derived from the PyTorch docs, as mentioned in [`NOTICE`](NOTICE). diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000..5949bbc6 --- /dev/null +++ b/NOTICE @@ -0,0 +1,40 @@ +This project's licensing is Apache-2.0. It includes components +governed by other licenses, which are detailed below. + +================================================================================ +1. PyTorch Documentation (BSD-3-Clause license) + +https://github.com/pytorch/docs + +Portions of this project's documentation are derived from the PyTorch documentation. +The original PyTorch documentation license is included below: + +BSD 3-Clause License + +Copyright (c) 2018, Facebook Inc +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md index 08cb1b83..3d39c830 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,51 @@ # torch machine-learning libraries for Source Academy + +## Getting Started + +Install yarn: + +```bash +corepack enable +# or +npm install -g yarn +``` + +Install dependencies: + +```bash +yarn install +``` + +## Demo Usage + +First, build it: + +```bash +yarn build +``` + +See [examples/](examples/) for examples. + +### Node + +See `examples/basic_backpropagation.js`. + +```bash +node examples/basic_backpropagation.js +``` + +### Browser + +You can run `yarn serve` and load `examples/browser/index.html` to see how it works. + +```bash +yarn serve +# and navigate to http://localhost:8080/examples/browser/index.html to run torch in js +# or http://localhost:8080/examples/pyodide/index.html to run in python +# or http://localhost:8080/test/ to run the tests +``` + +## Contributing + +For detailed information on the codebase and tests, see [CONTRIBUTING.md](CONTRIBUTING.md).
diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 00000000..784224be --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,19 @@ +// @ts-check + +import eslint from '@eslint/js'; +import { defineConfig } from 'eslint/config'; +import tseslint from 'typescript-eslint'; + +export default defineConfig(eslint.configs.recommended, tseslint.configs.recommended, { + rules: { + '@typescript-eslint/no-unused-vars': [ + 'warn', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '(^_)|(^[A-Z])', + caughtErrorsIgnorePattern: '^_', + ignoreRestSiblings: true + } + ] + } +}); diff --git a/examples/basic_backpropagation.js b/examples/basic_backpropagation.js new file mode 100644 index 00000000..8274ac2d --- /dev/null +++ b/examples/basic_backpropagation.js @@ -0,0 +1,9 @@ +import { Tensor } from '../build/node/torch.node.es.js'; + +const x = new Tensor([2.0], { requires_grad: true }); +const y = x.pow(new Tensor([2.0])); + +console.log(y.item()); +y.backward(); + +console.log(x.grad?.item()); diff --git a/examples/browser/index.html b/examples/browser/index.html new file mode 100644 index 00000000..fad64e69 --- /dev/null +++ b/examples/browser/index.html @@ -0,0 +1,32 @@ + + + + + + + torch.js browser backprop example + + + + +

+
+    
+
+
+
diff --git a/examples/index.html b/examples/index.html
new file mode 100644
index 00000000..70308ffb
--- /dev/null
+++ b/examples/index.html
@@ -0,0 +1,18 @@
+
+
+
+
+    
+    
+    Examples
+
+
+
+    

Examples

+ + + + diff --git a/examples/pyodide/.gitignore b/examples/pyodide/.gitignore new file mode 100644 index 00000000..2aa8c99a --- /dev/null +++ b/examples/pyodide/.gitignore @@ -0,0 +1,144 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.* +!.env.example + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist +.output + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Sveltekit cache directory +.svelte-kit/ + +# vitepress build output +**/.vitepress/dist + +# vitepress cache directory +**/.vitepress/cache + +# 
Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# Firebase cache directory +.firebase/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# pnpm +.pnpm-store + +# yarn v3 +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/sdks +!.yarn/versions + +# Vite files +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +.vite/ diff --git a/examples/pyodide/bridge.py b/examples/pyodide/bridge.py new file mode 100644 index 00000000..872c2598 --- /dev/null +++ b/examples/pyodide/bridge.py @@ -0,0 +1,805 @@ +# bridge.py +# Provides a PyTorch-compatible Python API over js_torch (the TypeScript torch library). +# +# Before loading this file, set the following globals in Pyodide: +# js_torch - the torch module (window.torch from the UMD build) + +from pyodide.ffi import JsProxy, to_js + + +# --------------------------------------------------------------------------- +# Internal helpers +# --------------------------------------------------------------------------- + +def _wrap_result(result): + """ + Wrap a JS return value: + - JsProxy (JS object/Tensor) -> Python Tensor + - Python primitive (int, float, bool) -> return as-is + JS primitives are automatically converted to Python by Pyodide, + so they will NOT be JsProxy instances. 
+ """ + if isinstance(result, JsProxy): + return Tensor(result) + return result + + +def _transform(obj): + """Convert Python objects to JS-compatible types before passing to JS.""" + if isinstance(obj, Tensor): + return obj._js + if isinstance(obj, (list, tuple)): + return to_js([_transform(item) for item in obj]) + return obj + + +def _transform_args(args): + return [_transform(a) for a in args] + + +# --------------------------------------------------------------------------- +# Tensor +# --------------------------------------------------------------------------- + +class Tensor: + """Python wrapper around a JS Tensor, mirroring the PyTorch Tensor API.""" + + # ------------------------------------------------------------------ + # Construction + # ------------------------------------------------------------------ + + def __new__(cls, data, requires_grad=False): + # Return None for missing tensors so e.g. `tensor.grad` returns None + # when there is no gradient — matching PyTorch behaviour. + # Pyodide may represent JS null as a special JsNull type (not JsProxy, not None). 
+ if data is None or type(data).__name__ in ('JsNull', 'JsUndefined'): + return None + return super().__new__(cls) + + def __init__(self, data, requires_grad=False): + if isinstance(data, JsProxy): + self._js = data + else: + js_data = to_js(data) if isinstance(data, (list, tuple)) else data + self._js = js_torch.tensor(js_data, requires_grad) + + # ------------------------------------------------------------------ + # Representation + # ------------------------------------------------------------------ + + def __repr__(self): + extra = ", requires_grad=True" if self.requires_grad else "" + return f"tensor({self.tolist()}{extra})" + + # ------------------------------------------------------------------ + # Data access + # ------------------------------------------------------------------ + + def tolist(self): + """Return tensor data as a (nested) Python list, or a Python scalar for 0-d tensors.""" + result = self._js.toArray() + if isinstance(result, JsProxy): + return result.to_py() + return result # scalar + + def item(self): + return self._js.item() + + # ------------------------------------------------------------------ + # Properties + # ------------------------------------------------------------------ + + @property + def shape(self): + return tuple(self._js.shape.to_py()) + + @property + def data(self): + """Detached view of the tensor data (no gradient).""" + return self.detach() + + @property + def requires_grad(self): + return bool(self._js.requires_grad) + + @requires_grad.setter + def requires_grad(self, value): + self._js.requires_grad = value + + @property + def grad(self): + raw = self._js.grad + if raw is None or type(raw).__name__ in ('JsNull', 'JsUndefined'): + return None + return Tensor(raw) + + @grad.setter + def grad(self, value): + self._js.grad = value._js if isinstance(value, Tensor) else None + + @property + def T(self): + if len(self.shape) < 2: + return self + return Tensor(self._js.transpose(0, 1)) + + # 
------------------------------------------------------------------ + # Grad utilities + # ------------------------------------------------------------------ + + def backward(self, gradient=None): + if gradient is None: + self._js.backward() + else: + self._js.backward(gradient._js) + + def detach(self): + return Tensor(self._js.detach()) + + def zero_(self): + self._js.zero_() + return self + + def retain_grad(self): + self._js.retain_grad() + + # ------------------------------------------------------------------ + # Shape utilities + # ------------------------------------------------------------------ + + def size(self, dim=None): + s = self.shape + return s if dim is None else s[dim] + + def dim(self): + return len(self.shape) + + def numel(self): + n = 1 + for s in self.shape: + n *= s + return n + + def reshape(self, *args): + shape = list(args[0]) if len(args) == 1 and isinstance(args[0], (list, tuple)) else list(args) + return Tensor(self._js.reshape(to_js(shape))) + + def view(self, *args): + return self.reshape(*args) + + def squeeze(self, dim=None): + if dim is None: + new_shape = [s for s in self.shape if s != 1] + return Tensor(self._js.reshape(to_js(new_shape or [1]))) + return Tensor(self._js.squeeze(dim)) + + def unsqueeze(self, dim): + return Tensor(self._js.unsqueeze(dim)) + + def expand(self, *args): + shape = list(args[0]) if len(args) == 1 and isinstance(args[0], (list, tuple)) else list(args) + return Tensor(self._js.expand(to_js(shape))) + + def transpose(self, dim0, dim1): + return Tensor(self._js.transpose(dim0, dim1)) + + def flatten(self, start_dim=0, end_dim=-1): + return Tensor(self._js.flatten(start_dim, end_dim)) + + # ------------------------------------------------------------------ + # Reductions — default (no dim) sums all elements, matching PyTorch + # ------------------------------------------------------------------ + + def sum(self, dim=None, keepdim=False): + return Tensor(self._js.sum() if dim is None else self._js.sum(dim, 
keepdim)) + + def mean(self, dim=None, keepdim=False): + return Tensor(self._js.mean() if dim is None else self._js.mean(dim, keepdim)) + + def max(self, dim=None, keepdim=False): + return Tensor(self._js.max() if dim is None else self._js.max(dim, keepdim)) + + def min(self, dim=None, keepdim=False): + return Tensor(self._js.min() if dim is None else self._js.min(dim, keepdim)) + + # ------------------------------------------------------------------ + # Arithmetic — explicit methods + # ------------------------------------------------------------------ + + def _to_js(self, other): + return other._js if isinstance(other, Tensor) else other + + def add(self, other): return Tensor(self._js.add(self._to_js(other))) + def sub(self, other): return Tensor(self._js.sub(self._to_js(other))) + def mul(self, other): return Tensor(self._js.mul(self._to_js(other))) + def div(self, other): return Tensor(self._js.div(self._to_js(other))) + def pow(self, other): return Tensor(self._js.pow(self._to_js(other))) + def matmul(self, other): return Tensor(self._js.matmul(self._to_js(other))) + + # ------------------------------------------------------------------ + # Arithmetic operators + # ------------------------------------------------------------------ + + def __add__(self, other): return self.add(other) + def __radd__(self, other): return self.add(other) # add is commutative + def __sub__(self, other): return self.sub(other) + def __rsub__(self, other): + o = other if isinstance(other, Tensor) else Tensor(other) + return o.sub(self) + def __mul__(self, other): return self.mul(other) + def __rmul__(self, other): return self.mul(other) # mul is commutative + def __truediv__(self, other): return self.div(other) + def __rtruediv__(self, other): + o = other if isinstance(other, Tensor) else Tensor(other) + return o.div(self) + def __pow__(self, other): return self.pow(other) + def __rpow__(self, other): + o = other if isinstance(other, Tensor) else Tensor(other) + return o.pow(self) + 
def __matmul__(self, other): return self.matmul(other) + def __neg__(self): return Tensor(self._js.neg()) + def __abs__(self): return Tensor(self._js.abs()) + + # ------------------------------------------------------------------ + # Unary operations + # ------------------------------------------------------------------ + + def neg(self): return Tensor(self._js.neg()) + def abs(self): return Tensor(self._js.abs()) + def log(self): return Tensor(self._js.log()) + def exp(self): return Tensor(self._js.exp()) + def sqrt(self): return Tensor(self._js.sqrt()) + def square(self): return Tensor(self._js.square()) + def sin(self): return Tensor(self._js.sin()) + def cos(self): return Tensor(self._js.cos()) + def tan(self): return Tensor(self._js.tan()) + def sigmoid(self): return Tensor(self._js.sigmoid()) + def relu(self): return Tensor(js_torch.nn.functional.relu(self._js)) + def softmax(self, dim): return Tensor(self._js.softmax(dim)) + def clamp(self, min, max): return Tensor(self._js.clamp(min, max)) + def sign(self): return Tensor(self._js.sign()) + def reciprocal(self): return Tensor(self._js.reciprocal()) + def nan_to_num(self): return Tensor(self._js.nan_to_num()) + + # ------------------------------------------------------------------ + # Comparison + # ------------------------------------------------------------------ + + def lt(self, other): return Tensor(self._js.lt(self._to_js(other))) + def gt(self, other): return Tensor(self._js.gt(self._to_js(other))) + def le(self, other): return Tensor(self._js.le(self._to_js(other))) + def ge(self, other): return Tensor(self._js.ge(self._to_js(other))) + def eq(self, other): return Tensor(self._js.eq(self._to_js(other))) + def ne(self, other): return Tensor(self._js.ne(self._to_js(other))) + + def allclose(self, other, rtol=1e-5, atol=1e-8, equal_nan=False): + return bool(js_torch.allclose(self._js, other._js, rtol, atol, equal_nan)) + + # ------------------------------------------------------------------ + # Type 
conversions + # ------------------------------------------------------------------ + + def __float__(self): return float(self.item()) + def __int__(self): return int(self.item()) + def __bool__(self): return bool(self.item()) + def __format__(self, fmt): return format(self.item(), fmt) + + # ------------------------------------------------------------------ + # Indexing + # ------------------------------------------------------------------ + + def __getitem__(self, key): + if isinstance(key, int): + return Tensor(self._js.index(key)) + if isinstance(key, tuple): + result = self._js + for k in key: + if isinstance(k, int): + result = result.index(k) + else: + raise NotImplementedError( + "Only integer indexing is supported in multi-dimensional indexing" + ) + return Tensor(result) + if isinstance(key, slice): + start, stop, step = key.indices(self.shape[0]) + data = [Tensor(self._js.index(i)).tolist() for i in range(start, stop, step)] + return Tensor(data) + raise TypeError(f"Invalid index type: {type(key).__name__}") + + # ------------------------------------------------------------------ + # Iteration and length + # ------------------------------------------------------------------ + + def __len__(self): + return self.shape[0] + + def __iter__(self): + data = self.tolist() + if not isinstance(data, list): + raise TypeError("iteration over a 0-d tensor") + for item in data: + yield Tensor(item) + + # ------------------------------------------------------------------ + # Catch-all: delegate unknown attribute accesses to the JS tensor. + # Returned JsProxy objects are wrapped in Tensor; primitives pass through. 
+ # ------------------------------------------------------------------ + + def __getattr__(self, name): + if name.startswith('_'): + raise AttributeError(name) + def method(*args, **kwargs): + js_args = _transform_args(args) + return _wrap_result(self._js.__getattribute__(name)(*js_args)) + return method + + +# --------------------------------------------------------------------------- +# Typed tensor subclasses +# --------------------------------------------------------------------------- + +def _trunc_nested(data): + """Truncate all numbers in a nested list toward zero (for LongTensor).""" + if isinstance(data, (int, float)): + return int(data) # Python int() truncates toward zero + return [_trunc_nested(item) for item in data] + + +class FloatTensor(Tensor): + """ + A Tensor that stores floating-point values. + Equivalent to a regular Tensor; provided for PyTorch API compatibility. + """ + def __init__(self, data, requires_grad=False): + if isinstance(data, JsProxy): + super().__init__(data) + else: + super().__init__(data, requires_grad) + + +class LongTensor(Tensor): + """ + A Tensor whose values are truncated to integers (toward zero). + LongTensor([-1.7]) -> tensor([-1]), LongTensor([1.9]) -> tensor([1]). 
+ """ + def __init__(self, data, requires_grad=False): + if isinstance(data, JsProxy): + super().__init__(data) + else: + truncated = _trunc_nested(data) if isinstance(data, (list, tuple)) else int(data) + super().__init__(truncated, requires_grad) + + +# --------------------------------------------------------------------------- +# no_grad context manager — actually disables grad in the JS engine +# --------------------------------------------------------------------------- + +class _NoGrad: + def __enter__(self): + self._prev = js_torch.enable_no_grad() + return self + + def __exit__(self, *args): + js_torch.disable_no_grad(self._prev) + + +# --------------------------------------------------------------------------- +# Parameter +# --------------------------------------------------------------------------- + +class Parameter(Tensor): + """A Tensor that is automatically registered as a parameter.""" + def __init__(self, data, requires_grad=True): + if isinstance(data, Tensor): + self._js = js_torch.nn.Parameter.new(data._js) + elif isinstance(data, JsProxy): + self._js = js_torch.nn.Parameter.new(data) + else: + self._js = js_torch.nn.Parameter.new(js_torch.tensor(data)) + if not requires_grad: + self._js.requires_grad = False + + +# --------------------------------------------------------------------------- +# Module — pure-Python base class for user-defined models +# --------------------------------------------------------------------------- + +class Module: + """ + Pure-Python nn.Module. Subclass this to build models using bridge Tensors. + Assign `Parameter` or `_NNModule` instances as attributes and they are + automatically tracked by `parameters()`. 
+ """ + + def __init__(self): + object.__setattr__(self, '_parameters', {}) + object.__setattr__(self, '_modules', {}) + object.__setattr__(self, 'training', True) + + def __setattr__(self, name, value): + try: + params = object.__getattribute__(self, '_parameters') + modules = object.__getattribute__(self, '_modules') + except AttributeError: + object.__setattr__(self, name, value) + return + + if isinstance(value, Parameter): + params[name] = value + elif isinstance(value, (Module, _NNModule)): + modules[name] = value + object.__setattr__(self, name, value) + + def __call__(self, *args, **kwargs): + return self.forward(*args, **kwargs) + + def forward(self, *args, **kwargs): + raise NotImplementedError + + def parameters(self): + params = list(object.__getattribute__(self, '_parameters').values()) + for mod in object.__getattribute__(self, '_modules').values(): + params.extend(mod.parameters()) + return params + + def named_parameters(self, prefix=''): + result = [] + for name, p in object.__getattribute__(self, '_parameters').items(): + full = f"{prefix}.{name}" if prefix else name + result.append((full, p)) + for mod_name, mod in object.__getattribute__(self, '_modules').items(): + full_mod = f"{prefix}.{mod_name}" if prefix else mod_name + result.extend(mod.named_parameters(full_mod)) + return result + + def train(self, mode=True): + object.__setattr__(self, 'training', mode) + for mod in object.__getattribute__(self, '_modules').values(): + mod.train(mode) + return self + + def eval(self): + return self.train(False) + + def zero_grad(self): + for p in self.parameters(): + p.grad = None + + +# --------------------------------------------------------------------------- +# _NNModule — wraps a JS nn.Module instance +# --------------------------------------------------------------------------- + +class _NNModule: + """Wraps a JS nn.Module returned by the nn factory functions.""" + + def __init__(self, js_module): + self._module = js_module + + def __call__(self, 
# ---------------------------------------------------------------------------
# _NNModule — wraps a JS nn.Module instance
# ---------------------------------------------------------------------------

class _NNModule:
    """Wraps a JS nn.Module returned by the nn factory functions."""

    def __init__(self, js_module):
        self._module = js_module

    def __call__(self, *args):
        # Unwrap bridge Tensors to their JS handles; other values pass through.
        js_args = [a._js if isinstance(a, Tensor) else a for a in args]
        return Tensor(self._module.call(*js_args))

    def forward(self, *args):
        js_args = [a._js if isinstance(a, Tensor) else a for a in args]
        return Tensor(self._module.forward(*js_args))

    def parameters(self):
        """The JS module's parameters, each wrapped as a bridge Tensor."""
        return [Tensor(p) for p in self._module.parameters().to_py()]

    def named_parameters(self, prefix=''):
        """(name, Tensor) pairs as reported by the JS module."""
        raw = self._module.named_parameters(prefix).to_py()
        return [(pair[0], Tensor(pair[1])) for pair in raw]

    def train(self, mode=True):
        self._module.train(mode)
        return self

    def eval(self):
        return self.train(False)

    def zero_grad(self):
        # NOTE(review): parameters() builds fresh wrappers on each call; this
        # assumes assigning Tensor.grad writes through to the JS tensor —
        # confirm against the Tensor definition earlier in this file.
        for p in self.parameters():
            p.grad = None


# ---------------------------------------------------------------------------
# nn.functional
# ---------------------------------------------------------------------------

class _NNFunctional:
    """Python-facing facade over js_torch.nn.functional."""

    def relu(self, input):
        return Tensor(js_torch.nn.functional.relu(input._js))

    def sigmoid(self, input):
        return Tensor(js_torch.nn.functional.sigmoid(input._js))

    def leaky_relu(self, input, negative_slope=0.01):
        return Tensor(js_torch.nn.functional.leaky_relu(input._js, negative_slope))

    def max_pool2d(self, input, kernel_size, stride=None, padding=0):
        # Fix: the previous version forwarded only `kernel_size` whenever
        # `stride` was None, silently dropping a caller-supplied `padding`.
        # PyTorch's documented default stride is the kernel size, so resolve
        # it here and always forward all three arguments.
        if stride is None:
            stride = kernel_size
        return Tensor(js_torch.nn.functional.max_pool2d(input._js, kernel_size, stride, padding))

    def nll_loss(self, input, target, reduction='mean'):
        return Tensor(js_torch.nn.functional.nll_loss(input._js, target._js, reduction))

    def __getattr__(self, name):
        """Fallback: forward any other functional by name to the JS side."""
        if name.startswith('_'):
            raise AttributeError(name)

        def fn(*args, **kwargs):
            return _wrap_result(js_torch.nn.functional.__getattribute__(name)(*_transform_args(args)))

        return fn


# ---------------------------------------------------------------------------
# nn.parameter namespace
# ---------------------------------------------------------------------------

class _NNParameterNamespace:
    """Mirror of torch.nn.parameter (exposes only Parameter)."""

    def __init__(self):
        self.Parameter = Parameter


# ---------------------------------------------------------------------------
# nn namespace
# ---------------------------------------------------------------------------

class _NNNamespace:
    """Mirror of torch.nn: functional namespace plus layer/loss factories.

    Factory methods are capitalized to match the PyTorch class names; each
    returns an _NNModule wrapping the corresponding JS constructor.
    """

    def __init__(self):
        self.functional = _NNFunctional()
        self.parameter = _NNParameterNamespace()
        self.Module = Module
        self.Parameter = Parameter

    def Linear(self, in_features, out_features, bias=True):
        return _NNModule(js_torch.nn.Linear.new(in_features, out_features, bias))

    def ReLU(self):
        return _NNModule(js_torch.nn.ReLU.new())

    def Sigmoid(self):
        return _NNModule(js_torch.nn.Sigmoid.new())

    def Sequential(self, *modules):
        # Unwrap each _NNModule to its JS module before constructing.
        js_mods = [m._module for m in modules]
        return _NNModule(js_torch.nn.Sequential.new(*js_mods))

    def MSELoss(self, reduction='mean'):
        return _NNModule(js_torch.nn.MSELoss.new(reduction))

    def L1Loss(self, reduction='mean'):
        return _NNModule(js_torch.nn.L1Loss.new(reduction))

    def BCELoss(self, weight=None, reduction='mean'):
        # A bridge Tensor weight is unwrapped; any other value maps to null.
        js_weight = weight._js if isinstance(weight, Tensor) else None
        return _NNModule(js_torch.nn.BCELoss.new(js_weight, reduction))

    def CrossEntropyLoss(self, reduction='mean'):
        return _NNModule(js_torch.nn.CrossEntropyLoss.new(reduction))

    def Conv1d(self, in_channels, out_channels, kernel_size,
               stride=1, padding=0, dilation=1, groups=1, bias=True):
        return _NNModule(js_torch.nn.Conv1d.new(
            in_channels, out_channels, kernel_size,
            stride, padding, dilation, groups, bias
        ))

    def Conv2d(self, in_channels, out_channels, kernel_size,
               stride=1, padding=0, dilation=1, groups=1, bias=True):
        return _NNModule(js_torch.nn.Conv2d.new(
            in_channels, out_channels, kernel_size,
            stride, padding, dilation, groups, bias
        ))

    def Conv3d(self, in_channels, out_channels, kernel_size,
               stride=1, padding=0, dilation=1, groups=1, bias=True):
        return _NNModule(js_torch.nn.Conv3d.new(
            in_channels, out_channels, kernel_size,
            stride, padding, dilation, groups, bias
        ))

    def LeakyReLU(self, negative_slope=0.01):
        return _NNModule(js_torch.nn.LeakyReLU.new(negative_slope))

    def MaxPool2d(self, kernel_size, stride=None, padding=0):
        # Fix: previously a caller-supplied `padding` was silently dropped
        # whenever `stride` was omitted. PyTorch's documented default stride
        # is the kernel size, so resolve it here and forward all arguments.
        if stride is None:
            stride = kernel_size
        return _NNModule(js_torch.nn.MaxPool2d.new(kernel_size, stride, padding))

    def Dropout(self, p=0.5):
        return _NNModule(js_torch.nn.Dropout.new(p))

    def Softmax(self, dim):
        return _NNModule(js_torch.nn.Softmax.new(dim))

    def Flatten(self, start_dim=1, end_dim=-1):
        return _NNModule(js_torch.nn.Flatten.new(start_dim, end_dim))

    def NLLLoss(self, reduction='mean'):
        return _NNModule(js_torch.nn.NLLLoss.new(reduction))


# ---------------------------------------------------------------------------
# optim wrappers
# ---------------------------------------------------------------------------

class _Optimizer:
    """Thin wrapper over a JS optimizer instance."""

    def __init__(self, js_optim):
        self._optim = js_optim

    def step(self):
        self._optim.step()

    def zero_grad(self):
        self._optim.zero_grad()


class _OptimNamespace:
    """Mirror of torch.optim; parameter lists are converted with to_js."""

    def SGD(self, params, lr=0.001, momentum=0.0, dampening=0.0,
            weight_decay=0.0, nesterov=False, maximize=False):
        js_params = to_js([p._js for p in params])
        return _Optimizer(js_torch.optim.SGD.new(
            js_params, lr, momentum, dampening, weight_decay, nesterov, maximize
        ))

    def Adam(self, params, lr=0.001, betas=(0.9, 0.999), eps=1e-8,
             weight_decay=0.0, amsgrad=False, maximize=False):
        js_params = to_js([p._js for p in params])
        js_betas = to_js(list(betas))
        return _Optimizer(js_torch.optim.Adam.new(
            js_params, lr, js_betas, eps, weight_decay, amsgrad, maximize
        ))

    def Adagrad(self, params, lr=0.01, lr_decay=0, weight_decay=0, eps=1e-10):
        js_params = to_js([p._js for p in params])
        return _Optimizer(js_torch.optim.Adagrad.new(
            js_params, lr, lr_decay, weight_decay, eps
        ))


# ---------------------------------------------------------------------------
# torch namespace
# ---------------------------------------------------------------------------

class _Torch:
    """Top-level torch facade: creation ops, functional wrappers, namespaces."""

    def __init__(self):
        self.nn = _NNNamespace()
        self.optim = _OptimNamespace()
        self.no_grad = _NoGrad
        self.Tensor = Tensor
        self.FloatTensor = FloatTensor
        self.LongTensor = LongTensor

    @property
    def tensor(self):
        # torch.tensor(...) constructs a bridge Tensor directly.
        return Tensor

    # --- creation functions ---

    def _shape_from_args(self, args):
        # Accept both zeros(2, 3) and zeros([2, 3]) / zeros((2, 3)).
        return list(args[0]) if len(args) == 1 and isinstance(args[0], (list, tuple)) else list(args)

    def zeros(self, *args, **kwargs):
        # NOTE(review): keyword arguments (dtype, requires_grad, ...) are
        # accepted but currently ignored by all creation wrappers.
        return Tensor(js_torch.zeros(to_js(self._shape_from_args(args))))

    def ones(self, *args, **kwargs):
        return Tensor(js_torch.ones(to_js(self._shape_from_args(args))))

    def zeros_like(self, input):
        return Tensor(js_torch.zeros_like(input._js))

    def ones_like(self, input):
        return Tensor(js_torch.ones_like(input._js))

    def randn(self, *args, **kwargs):
        return Tensor(js_torch.randn(to_js(self._shape_from_args(args))))

    def rand(self, *args, **kwargs):
        return Tensor(js_torch.rand(to_js(self._shape_from_args(args))))

    def arange(self, start, end=None, step=1):
        # Single-argument form: arange(n) == arange(0, n).
        if end is None:
            end = start
            start = 0
        return Tensor(js_torch.arange(start, end, step))

    def linspace(self, start, end, steps):
        return Tensor(js_torch.linspace(start, end, steps))

    def empty(self, *args, **kwargs):
        return Tensor(js_torch.empty(to_js(self._shape_from_args(args))))

    def empty_like(self, input):
        return Tensor(js_torch.empty_like(input._js))

    def full(self, shape, fill_value):
        return Tensor(js_torch.full(to_js(list(shape)), fill_value))

    def full_like(self, input, fill_value):
        return Tensor(js_torch.full_like(input._js, fill_value))

    def rand_like(self, input):
        return Tensor(js_torch.rand_like(input._js))

    def randn_like(self, input):
        return Tensor(js_torch.randn_like(input._js))

    def randint_like(self, input, low, high):
        return Tensor(js_torch.randint_like(input._js, low, high))

    # --- utility functions ---

    def is_tensor(self, obj):
        return isinstance(obj, Tensor)

    def is_nonzero(self, input):
        if input.numel() != 1:
            raise RuntimeError(
                "Boolean value of Tensor with more than one element is ambiguous"
            )
        return bool(input.item() != 0)

    def numel(self, input):
        return input.numel()

    # --- functional wrappers (delegate to the Tensor methods) ---

    def sum(self, input, dim=None, keepdim=False):
        return input.sum(dim, keepdim)

    def mean(self, input, dim=None, keepdim=False):
        return input.mean(dim, keepdim)

    def sigmoid(self, input):
        return input.sigmoid()

    def relu(self, input):
        return input.relu()

    def softmax(self, input, dim):
        return input.softmax(dim)

    def clamp(self, input, min, max):
        return input.clamp(min, max)

    def clip(self, input, min, max):
        # Alias of clamp, matching PyTorch.
        return self.clamp(input, min, max)

    def flatten(self, input, start_dim=0, end_dim=-1):
        return input.flatten(start_dim, end_dim)

    def allclose(self, a, b, rtol=1e-5, atol=1e-8, equal_nan=False):
        return a.allclose(b, rtol, atol, equal_nan)

    def is_grad_enabled(self):
        return bool(js_torch.is_grad_enabled())

    def cat(self, tensors, dim=0):
        # Tolerate a bare Tensor where PyTorch requires a sequence.
        if isinstance(tensors, Tensor):
            tensors = [tensors]
        return Tensor(js_torch.cat(to_js([t._js for t in tensors]), dim))

    def concatenate(self, tensors, dim=0):
        return self.cat(tensors, dim)

    def concat(self, tensors, dim=0):
        return self.cat(tensors, dim)

    def stack(self, tensors, dim=0):
        return Tensor(js_torch.stack(to_js([t._js for t in tensors]), dim))

    def Size(self, shape):
        # A plain list stands in for torch.Size.
        return list(shape)

    def __getattr__(self, name):
        """Fallback: forward any other torch-level function to js_torch.

        NOTE(review): **kwargs are accepted but not forwarded.
        """
        if name.startswith('_'):
            raise AttributeError(name)

        def fn(*args, **kwargs):
            return _wrap_result(js_torch.__getattribute__(name)(*_transform_args(args)))

        return fn


# The singleton handed to Pyodide user code as `torch`.
torch = _Torch()
a/examples/pyodide/index.html b/examples/pyodide/index.html new file mode 100644 index 00000000..f3ea57b1 --- /dev/null +++ b/examples/pyodide/index.html @@ -0,0 +1,119 @@ + + + + + + + torch+pyodide browser backprop example + + + + +

Torch + Pyodide

+
+ +
+
+ +
+ +
+ + +
+
+

Output:

+

+    
// Runner for the Pyodide examples: executes each script in py/ inside
// Pyodide (through the torch bridge) and compares every stdout line that
// starts with ">" against the cached output generated by real CPython.
import { loadPyodide } from 'pyodide';
import * as torch from 'torch';
import { readFileSync, writeFileSync, mkdirSync, readdirSync, existsSync } from 'fs';
import { execSync } from 'child_process';
import path from 'path';

const PY_DIR = './py';
const CACHE_DIR = './.cache';

// All Python example files, in a stable (sorted) order.
function getPyFiles() {
  return readdirSync(PY_DIR).filter(f => f.endsWith('.py')).sort();
}

// Fresh Pyodide interpreter with the torch bridge installed in its globals.
async function setupPyodide() {
  const pyodide = await loadPyodide();
  pyodide.globals.set('js_torch', torch);
  pyodide.globals.set('_js_is_null', (x) => x == null);
  pyodide.runPython(readFileSync('./bridge.py', 'utf8'));
  return pyodide;
}

// Run one example's source inside Pyodide and return its captured stdout.
async function runWithPyodide(pyodide, code) {
  // Reset grad state to enabled before each file.
  // disable_no_grad(prev) restores a previous state; passing True enables grad.
  pyodide.runPython('js_torch.disable_no_grad(True)');
  pyodide.globals.set('_test_code', code);
  pyodide.runPython(`
import sys, builtins
from io import StringIO
_ns = {'torch': torch, '__builtins__': builtins}
_saved = sys.stdout
sys.stdout = StringIO()
try:
    exec(_test_code, _ns)
    _output = sys.stdout.getvalue()
finally:
    sys.stdout = _saved
`);
  return pyodide.globals.get('_output');
}

// `gen` mode: run each example under real CPython/PyTorch and cache stdout.
async function cmdGen() {
  if (!existsSync(CACHE_DIR)) mkdirSync(CACHE_DIR);
  const files = getPyFiles();
  let ok = 0, fail = 0;
  for (const file of files) {
    const name = path.basename(file, '.py');
    try {
      const output = execSync(
        `python3 -c "import torch, builtins; exec(open('${PY_DIR}/${file}').read(), {'torch': torch, '__builtins__': builtins})"`,
        { encoding: 'utf8' }
      );
      writeFileSync(path.join(CACHE_DIR, `${name}.out`), output);
      console.log(`✓ ${name}`);
      ok++;
    } catch (e) {
      console.error(`✗ ${name}: ${e.stderr?.trim() ?? e.message}`);
      fail++;
    }
  }
  console.log(`\n${ok} generated, ${fail} failed`);
  if (fail > 0) process.exit(1);
}

// `test` mode: run each example in Pyodide and diff the ">"-marked lines
// against the cached CPython output.
async function cmdTest() {
  const files = getPyFiles();
  let passed = 0, failed = 0;

  for (const file of files) {
    const name = path.basename(file, '.py');
    const cachePath = path.join(CACHE_DIR, `${name}.out`);
    if (!existsSync(cachePath)) {
      console.log(`✗ ${name}: no cache (run 'yarn gen-cache' first)`);
      failed++;
      continue;
    }

    // A fresh interpreter per file keeps the examples isolated.
    const pyodide = await setupPyodide();
    const code = readFileSync(path.join(PY_DIR, file), 'utf8');
    let actual;
    try {
      actual = await runWithPyodide(pyodide, code);
    } catch (e) {
      console.log(`✗ ${name}: ${e.message}`);
      failed++;
      continue;
    }

    const expected = readFileSync(cachePath, 'utf8');
    // Only lines beginning with ">" participate in the comparison.
    const aMarked = actual.split('\n').filter(l => l.startsWith('>'));
    const eMarked = expected.split('\n').filter(l => l.startsWith('>'));
    const mismatches = [];
    const maxLen = Math.max(aMarked.length, eMarked.length);
    for (let i = 0; i < maxLen; i++) {
      if (aMarked[i] !== eMarked[i]) {
        mismatches.push(`  [assertion ${i + 1}] expected: ${JSON.stringify(eMarked[i] ?? '(missing)')}`);
        mismatches.push(`  [assertion ${i + 1}] actual: ${JSON.stringify(aMarked[i] ?? '(missing)')}`);
      }
    }
    if (mismatches.length === 0) {
      console.log(`✓ ${name} (${aMarked.length} assertions)`);
      passed++;
    } else {
      // Two mismatch lines (expected/actual) per failed assertion.
      console.log(`✗ ${name}: ${mismatches.length / 2} assertion(s) failed`);
      mismatches.forEach(l => console.log(l));
      failed++;
    }
  }

  console.log(`\n${passed} passed, ${failed} failed`);
  if (failed > 0) process.exit(1);
}

const [mode] = process.argv.slice(2);
if (mode === 'gen') cmdGen().catch(e => { console.error(e); process.exit(1); });
else if (mode === 'test') cmdTest().catch(e => { console.error(e); process.exit(1); });
else {
  console.error('Usage: node main.js [gen|test]');
  process.exit(1);
}

// --- examples/pyodide/package.json (verbatim — strict JSON takes no comments) ---
{
  "name": "example-pyodide",
  "version": "0.1.0",
  "private": true,
  "type": "module",
  "scripts": {
    "gen-cache": "node main.js gen",
    "test-pyodide": "node main.js test"
  },
  "dependencies": {
    "pyodide": "^0.29.3",
    "torch": "portal:../../"
  }
}

# basic_ops.py — Basic tensor operations, autograd, and backward pass.
# Verifies: creation, arithmetic operators, reductions, backward, grad access.
# (basic_ops.py continued — lines printed with a leading "> " are the ones
# compared against the cached CPython output by main.js)

print("=== Basic tensor creation ===")
x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)
y = torch.tensor([4.0, 5.0, 6.0], requires_grad=True)
print("x:", x)
print("y:", y)
print("x.shape:", x.shape)
print("x.requires_grad:", x.requires_grad)
print("> x.requires_grad:", x.requires_grad)

print("\n=== Arithmetic operators ===")
print("x + y:", x + y)
print("x - y:", x - y)
print("x * y:", x * y)
print("x / y:", x / y)
print("x ** 2:", x ** 2)
print("> x+y correct:", (x + y).allclose(torch.tensor([5.0, 7.0, 9.0])))
print("> x*y correct:", (x * y).allclose(torch.tensor([4.0, 10.0, 18.0])))
print("> x/y correct:", (x / y).allclose(torch.tensor([0.25, 0.4, 0.5])))

print("\n=== Scalar arithmetic (reversed operators) ===")
print("2 + x:", 2 + x)  # __radd__
print("10 - x:", 10 - x)  # __rsub__
print("2 * x:", 2 * x)  # __rmul__
print("1 / x:", 1 / x)  # __rtruediv__
print("2 ** x:", 2 ** x)  # __rpow__
print("> 2+x correct:", (2 + x).allclose(torch.tensor([3.0, 4.0, 5.0])))
print("> 2*x correct:", (2 * x).allclose(torch.tensor([2.0, 4.0, 6.0])))

print("\n=== Unary operations ===")
print("-x:", -x)
print("x.neg():", x.neg())
print("x.abs():", torch.tensor([-1.0, 2.0, -3.0]).abs())
print("x.sqrt():", x.sqrt())
print("x.exp():", x.exp())
print("x.log():", x.log())
print("x.sigmoid():", x.sigmoid())
print("x.relu():", torch.tensor([-1.0, 0.0, 2.0]).relu())
print("> neg correct:", (-x).allclose(torch.tensor([-1.0, -2.0, -3.0])))
print("> sqrt correct:", x.sqrt().allclose(torch.tensor([1.0, 1.4142, 1.7321]), atol=1e-4))
print("> relu correct:", torch.tensor([-1.0, 0.0, 2.0]).relu().allclose(torch.tensor([0.0, 0.0, 2.0])))

print("\n=== Reductions ===")
a = torch.tensor([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
print("a:", a)
print("a.sum():", a.sum())  # sum all
print("a.sum(dim=0):", a.sum(dim=0))  # sum along rows
print("a.sum(dim=1):", a.sum(dim=1))  # sum along cols
print("a.mean():", a.mean())
print("a.max():", a.max())
print("a.min():", a.min())
print("> a.sum() correct:", a.sum().allclose(torch.tensor(21.0)))
print("> a.sum(dim=0) correct:", a.sum(dim=0).allclose(torch.tensor([5.0, 7.0, 9.0])))
print("> a.mean() correct:", a.mean().allclose(torch.tensor(3.5)))

print("\n=== Shape utilities ===")
print("a.shape:", a.shape)
print("a.size():", a.size())
print("a.size(0):", a.size(0))
print("a.dim():", a.dim())
print("a.numel():", a.numel())
print("a.reshape(3, 2):", a.reshape(3, 2))
print("a.reshape([6]):", a.reshape([6]))
print("a.T:", a.T)
print("a.transpose(0, 1):", a.transpose(0, 1))
print("> a.size(0):", a.size(0))
print("> a.dim():", a.dim())
print("> a.numel():", a.numel())

print("\n=== Comparison operators ===")
b = torch.tensor([1.0, 3.0, 3.0])
c = torch.tensor([2.0, 2.0, 3.0])
print("b:", b, " c:", c)
print("b.lt(c):", b.lt(c))
print("b.gt(c):", b.gt(c))
print("b.eq(c):", b.eq(c))

print("\n=== Backward pass (z = sum(x * y)) ===")
x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)
y = torch.tensor([4.0, 5.0, 6.0], requires_grad=True)
z = (x * y).sum()
print("z:", z)
z.backward()
print("x.grad (expected [4,5,6]):", x.grad)
print("y.grad (expected [1,2,3]):", y.grad)
print("> x.grad correct:", x.grad is not None and x.grad.allclose(torch.tensor([4.0, 5.0, 6.0])))
print("> y.grad correct:", y.grad is not None and y.grad.allclose(torch.tensor([1.0, 2.0, 3.0])))

print("\n=== Backward with chain rule (z = sum((x+1)^2)) ===")
x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)
z = ((x + 1) ** 2).sum()
z.backward()
# dz/dx = 2*(x+1) = [4, 6, 8]
print("x.grad (expected [4,6,8]):", x.grad)
print("> x.grad correct:", x.grad is not None and x.grad.allclose(torch.tensor([4.0, 6.0, 8.0])))

print("\n=== allclose ===")
a = torch.tensor([1.0, 2.0, 3.0])
b = torch.tensor([1.0, 2.0, 3.0000001])
print("allclose (expected True):", a.allclose(b))
print("> allclose (expected True):", a.allclose(b))
b2 = torch.tensor([1.0, 2.0, 4.0])
+print("allclose (expected False):", a.allclose(b2)) +print("> allclose (expected False):", a.allclose(b2)) + +print("\n=== Iteration ===") +t = torch.tensor([[1.0, 2.0], [3.0, 4.0]]) +for row in t: + print("row:", row) +print("> num rows:", sum(1 for _ in torch.tensor([[1.0, 2.0], [3.0, 4.0]]))) + +print("\n=== item(), float(), int() ===") +s = torch.tensor([42.0]) +print("s.item():", s.item()) +print("float(s):", float(s)) +print("int(s):", int(s)) +print("> float(s):", float(s)) +print("> int(s):", int(s)) + +print("\nAll basic_ops checks passed.") diff --git a/examples/pyodide/py/custom_module.py b/examples/pyodide/py/custom_module.py new file mode 100644 index 00000000..ac6ed975 --- /dev/null +++ b/examples/pyodide/py/custom_module.py @@ -0,0 +1,88 @@ +# custom_module.py — User-defined Module subclass (pure Python), training loop. +# Verifies: Module subclassing, parameter registration, forward(), parameters(). + +class TwoLayerNet(torch.nn.Module): + def __init__(self, in_dim, hidden_dim, out_dim): + super().__init__() + self.fc1 = torch.nn.Linear(in_dim, hidden_dim) + self.fc2 = torch.nn.Linear(hidden_dim, out_dim) + + def forward(self, x): + x = self.fc1(x) + x = x.relu() + x = self.fc2(x) + return x + + +print("=== Custom Module construction ===") +model = TwoLayerNet(2, 4, 1) + +x = torch.tensor([[1.0, 2.0], [3.0, 4.0]]) +out = model(x) +print("input shape:", x.shape) +print("output shape:", out.shape) +print("output:", out) + +print("\n=== parameters() collects from all sub-modules ===") +params = list(model.parameters()) +print("number of parameters:", len(params)) +# 2 layers × (weight + bias) = 4 parameters +print("expected 4:", len(params) == 4) +print("> n_params == 4:", len(params) == 4) + +print("\n=== named_parameters() ===") +for name, p in model.named_parameters(): + print(f" {name}: shape={p.shape}") + +print("\n=== Backward through custom Module ===") +loss = out.sum() +loss.backward() +print("All parameter grads computed:") +for name, p in 
model.named_parameters(): + print(f" {name}.grad is not None:", p.grad is not None) +print("> all param grads computed:", all(p.grad is not None for p in model.parameters())) + +print("\n=== Training loop with custom Module ===") + +# XOR-like dataset (linearly separable approximation) +xs = torch.tensor([ + [0.0, 0.0], + [0.0, 1.0], + [1.0, 0.0], + [1.0, 1.0], +]) +ys = torch.tensor([[0.0], [1.0], [1.0], [0.0]]) + +model = TwoLayerNet(2, 8, 1) +loss_fn = torch.nn.MSELoss() +opt = torch.optim.Adam(model.parameters(), lr=0.05) + +initial_loss = None +for epoch in range(300): + opt.zero_grad() + pred = model(xs) + loss = loss_fn(pred, ys) + loss.backward() + opt.step() + if epoch == 0: + initial_loss = loss.item() + if epoch % 50 == 0: + print(f" epoch {epoch:3d} loss={loss:.4f}") + +print("Loss decreased:", loss.item() < initial_loss) +print("> loss decreased:", loss.item() < initial_loss) + +print("\n=== Module.zero_grad() clears all parameter grads ===") +# Run one backward pass +opt.zero_grad() +model(xs).sum().backward() +grads_exist = all(p.grad is not None for p in model.parameters()) +print("Grads exist after backward:", grads_exist) +print("> grads exist after backward:", grads_exist) + +model.zero_grad() +grads_cleared = all(p.grad is None for p in model.parameters()) +print("Grads cleared after zero_grad:", grads_cleared) +print("> grads cleared after zero_grad:", grads_cleared) + +print("\nAll custom_module checks passed.") diff --git a/examples/pyodide/py/linear_model.py b/examples/pyodide/py/linear_model.py new file mode 100644 index 00000000..4494a4b6 --- /dev/null +++ b/examples/pyodide/py/linear_model.py @@ -0,0 +1,83 @@ +# linear_model.py — torch.nn.Linear forward pass, loss computation, backward. +# Verifies: nn.Linear, MSELoss, BCELoss, Sequential, nn.ReLU, gradient flow. 
# (linear_model.py continued)

print("=== nn.Linear forward pass ===")
fc = torch.nn.Linear(3, 2)
x = torch.tensor([[1.0, 2.0, 3.0]])  # batch of 1, 3 features
out = fc(x)
print("input shape:", x.shape)
print("output shape:", out.shape)
print("output:", out)

print("\n=== nn.Linear parameters ===")
params = list(fc.parameters())
print("number of parameters:", len(params))
print("param shapes:", [p.shape for p in params])
print("all require grad:", all(p.requires_grad for p in params))
print("> n_params:", len(params))
print("> all require grad:", all(p.requires_grad for p in params))

print("\n=== named_parameters ===")
for name, p in fc.named_parameters():
    print(f"  {name}: shape={p.shape}")

print("\n=== MSELoss ===")
pred = torch.tensor([1.0, 2.0, 3.0])
target = torch.tensor([1.5, 2.5, 2.0])
loss_fn = torch.nn.MSELoss()
loss = loss_fn(pred, target)
print("MSELoss:", loss)
# Expected: mean([(0.5)^2, (0.5)^2, (1)^2]) = mean([0.25, 0.25, 1.0]) = 0.5
expected = ((1.0-1.5)**2 + (2.0-2.5)**2 + (3.0-2.0)**2) / 3
print("Expected:", expected)
print("Close:", abs(loss.item() - expected) < 1e-6)
print("> MSELoss close:", abs(loss.item() - expected) < 1e-6)

print("\n=== L1Loss ===")
loss_fn = torch.nn.L1Loss()
loss = loss_fn(pred, target)
print("L1Loss:", loss)
expected_l1 = (0.5 + 0.5 + 1.0) / 3
print("Expected:", expected_l1)
print("Close:", abs(loss.item() - expected_l1) < 1e-6)
print("> L1Loss close:", abs(loss.item() - expected_l1) < 1e-6)

print("\n=== Backward through Linear ===")
fc = torch.nn.Linear(2, 1)
x = torch.tensor([[1.0, 2.0]], requires_grad=True)
out = fc(x)
loss = out.sum()
loss.backward()
print("x.grad (should be non-None):", x.grad)
for name, p in fc.named_parameters():
    print(f"  {name}.grad is not None:", p.grad is not None)
print("> x.grad is not None:", x.grad is not None)
print("> all param grads computed:", all(p.grad is not None for p in fc.parameters()))

print("\n=== Sequential ===")
model = torch.nn.Sequential(
    torch.nn.Linear(4, 8),
    torch.nn.ReLU(),
    torch.nn.Linear(8, 2),
)
x = torch.tensor([[1.0, 0.5, -1.0, 2.0]])
out = model(x)
print("Sequential output shape:", out.shape)
print("Sequential output:", out)
params = list(model.parameters())
print("Total parameters:", len(params))
print("> sequential n_params:", len(params))

print("\n=== zero_grad clears gradients ===")
fc = torch.nn.Linear(2, 1)
x = torch.tensor([[1.0, 1.0]])
out = fc(x).sum()
out.backward()
for name, p in fc.named_parameters():
    print(f"  {name}.grad before zero_grad:", p.grad)
fc.zero_grad()
for name, p in fc.named_parameters():
    print(f"  {name}.grad after zero_grad (expected None):", p.grad)
print("> grads cleared:", all(p.grad is None for p in fc.parameters()))

print("\nAll linear_model checks passed.")

# --- examples/pyodide/py/nn_module.py --------------------------------------
# nn_module.py — Tests for custom nn.Module subclasses, parameter registration,
# call vs forward separation, and nested models.
class MyLinearLayer(torch.nn.Module):
    """Hand-rolled affine layer: output = input @ weight + bias.

    Both tensors are wrapped in torch.nn.Parameter so that Module's
    registration machinery picks them up (parameters(), named_parameters(),
    zero_grad(), etc.).
    """

    def __init__(self, in_features, out_features):
        super().__init__()
        # NOTE: weight is laid out (in_features, out_features) — the transpose
        # of torch.nn.Linear's convention — so forward() multiplies directly.
        self.weight = torch.nn.Parameter(torch.rand(in_features, out_features))
        self.bias = torch.nn.Parameter(torch.rand(out_features))

    def forward(self, input):
        # Plain matmul plus bias; autograd tracks both registered parameters.
        return input @ self.weight + self.bias


class MySmallModel(torch.nn.Module):
    """Nests a user-defined layer and a built-in one.

    Exercises recursive sub-module registration: parameters() must collect
    from both self.lin1 (custom) and self.lin2 (torch.nn.Linear).
    """

    def __init__(self, in_features, intermediate_features, out_features):
        super().__init__()
        # Using our own defined layer
        self.lin1 = MyLinearLayer(in_features, intermediate_features)
        # Using pre-defined Linear Layer
        self.lin2 = torch.nn.Linear(intermediate_features, out_features)

    def forward(self, x):
        # Chain the two sub-modules; equivalent to sequential application.
        return self.lin2(self.lin1(x))


print("=== MyLinearLayer: output shape ===")
layer = MyLinearLayer(4, 3)
x = torch.tensor([[1.0, 2.0, 3.0, 4.0]])
out = layer(x)
print("output shape:", list(out.shape))
print("> output shape correct:", list(out.shape) == [1, 3])

print("\n=== MyLinearLayer: parameter registration ===")
params = list(layer.parameters())
print("num parameters:", len(params))
print("> num parameters:", len(params) == 2)  # weight + bias
for pname, param in layer.named_parameters():
    print(f"  {pname}: shape={list(param.shape)}")
print("> weight shape:", list(layer.weight.shape) == [4, 3])
print("> bias shape:", list(layer.bias.shape) == [3])

print("\n=== MyLinearLayer: __call__ vs forward() ===")
via_call = layer(x)
via_forward = layer.forward(x)
print("> outputs match:", torch.allclose(via_call, via_forward))

print("\n=== MyLinearLayer: backward ===")
layer.zero_grad()
layer(x).sum().backward()
print("> weight.grad exists:", layer.weight.grad is not None)
print("> bias.grad exists:", layer.bias.grad is not None)

print("\n=== MySmallModel: output shape ===")
model = MySmallModel(4, 8, 2)
x = torch.tensor([[1.0, 2.0, 3.0, 4.0]])
out = model(x)
print("output shape:", list(out.shape))
print("> output shape correct:", list(out.shape) == [1, 2])
+print("\n=== MySmallModel: parameters collected from both sub-modules ===") +params = list(model.parameters()) +# lin1: weight (4x8) + bias (8) = 2 params +# lin2: weight (8x2) + bias (2) = 2 params +print("num parameters:", len(params)) +print("> num parameters:", len(params) == 4) +for name, p in model.named_parameters(): + print(f" {name}: shape={list(p.shape)}") + +print("\n=== MySmallModel: __call__ vs forward() ===") +out_call = model(x) +out_forward = model.forward(x) +print("> outputs match:", torch.allclose(out_call, out_forward)) + +print("\n=== MySmallModel: backward through nested modules ===") +model.zero_grad() +model(x).sum().backward() +print("> all grads computed:", all(p.grad is not None for p in model.parameters())) + +print("\n=== __call__ vs forward() on a built-in module ===") +fc = torch.nn.Linear(3, 2) +x = torch.tensor([[1.0, 2.0, 3.0]]) +out_call = fc(x) +out_forward = fc.forward(x) +print("> outputs match:", torch.allclose(out_call, out_forward)) +print("> shapes match:", out_call.shape == out_forward.shape) + +print("\n=== Sequential: submodules run via __call__ path ===") +seq = torch.nn.Sequential( + torch.nn.Linear(2, 4), + torch.nn.ReLU(), + torch.nn.Linear(4, 1), +) +x = torch.tensor([[1.0, 2.0]]) +out_seq_call = seq(x) +out_seq_forward = seq.forward(x) +print("output shape:", list(out_seq_call.shape)) +print("> output shape correct:", list(out_seq_call.shape) == [1, 1]) +print("> call and forward match:", torch.allclose(out_seq_call, out_seq_forward)) + +print("\nAll nn_module checks passed.") diff --git a/examples/pyodide/py/no_grad.py b/examples/pyodide/py/no_grad.py new file mode 100644 index 00000000..682ff6ea --- /dev/null +++ b/examples/pyodide/py/no_grad.py @@ -0,0 +1,53 @@ +# no_grad.py — Verify that torch.no_grad() actually disables gradient tracking. 
+ +print("=== no_grad context manager ===") + +x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True) + +print("Grad enabled outside context:", torch.is_grad_enabled()) +print("> Grad enabled outside context:", torch.is_grad_enabled()) + +with torch.no_grad(): + print("Grad enabled inside context:", torch.is_grad_enabled()) + print("> Grad enabled inside context:", torch.is_grad_enabled()) + y = x * 2 + print("y.requires_grad inside no_grad (expected False):", y.requires_grad) + print("> y.requires_grad inside no_grad:", y.requires_grad) + # y has no grad_fn, so backward would be a no-op + +print("Grad enabled after context:", torch.is_grad_enabled()) +print("> Grad enabled after context:", torch.is_grad_enabled()) + +# Outside no_grad, requires_grad propagates normally +z = x * 2 +print("z.requires_grad outside no_grad (expected True):", z.requires_grad) +print("> z.requires_grad outside no_grad:", z.requires_grad) + +print("\n=== Gradient NOT computed inside no_grad ===") +x = torch.tensor([3.0], requires_grad=True) +with torch.no_grad(): + y = x * x # should not build computation graph + print("y:", y) + print("y.requires_grad (expected False):", y.requires_grad) + print("> y.requires_grad inside no_grad:", y.requires_grad) + +# x.grad should still be None since we didn't call backward +print("x.grad after no_grad block (expected None):", x.grad) +print("> x.grad after no_grad block is None:", x.grad is None) + +print("\n=== Gradient IS computed outside no_grad ===") +x = torch.tensor([3.0], requires_grad=True) +y = x * x +y.backward() +print("x.grad after backward (expected 6.0):", x.grad) +print("> x.grad correct:", x.grad is not None and x.grad.allclose(torch.tensor([6.0]))) + +print("\n=== Nested no_grad blocks restore state correctly ===") +print("is_grad_enabled:", torch.is_grad_enabled()) +print("> is_grad_enabled before outer:", torch.is_grad_enabled()) +with torch.no_grad(): + print("inside outer no_grad:", torch.is_grad_enabled()) +print("after outer 
no_grad (expected True):", torch.is_grad_enabled()) +print("> is_grad_enabled after outer:", torch.is_grad_enabled()) + +print("\nAll no_grad checks passed.") diff --git a/examples/pyodide/py/training_sgd.py b/examples/pyodide/py/training_sgd.py new file mode 100644 index 00000000..ae7133be --- /dev/null +++ b/examples/pyodide/py/training_sgd.py @@ -0,0 +1,81 @@ +# training_sgd.py — Full training loop: fit y = 2x + 1 with SGD and Adam. +# Verifies: optimizer.step(), optimizer.zero_grad(), loss decreases. + +print("=== Training y = 2x + 1 with SGD ===") + +# Dataset: y = 2*x + 1 +xs = torch.tensor([[1.0], [2.0], [3.0], [4.0], [5.0]]) +ys = torch.tensor([[3.0], [5.0], [7.0], [9.0], [11.0]]) + +model = torch.nn.Linear(1, 1) +loss_fn = torch.nn.MSELoss() +opt = torch.optim.SGD(model.parameters(), lr=0.01) + +losses = [] +for epoch in range(200): + opt.zero_grad() + pred = model(xs) + loss = loss_fn(pred, ys) + loss.backward() + opt.step() + if epoch % 40 == 0: + losses.append(loss.item()) + print(f" epoch {epoch:3d} loss={loss:.6f}") + +print("Loss decreased:", losses[-1] < losses[0]) +print("> SGD loss decreased:", losses[-1] < losses[0]) + +with torch.no_grad(): + pred = model(xs) +print("Final predictions:", pred) +print("Expected: ", ys) + +# Check weights are approximately correct (w≈2, b≈1) +params = {name: p for name, p in model.named_parameters()} +w = params['weight'].item() +b = params['bias'].item() +print(f"Learned weight={w:.3f} (expected ~2), bias={b:.3f} (expected ~1)") +print("Weight close:", abs(w - 2.0) < 0.2) +print("Bias close: ", abs(b - 1.0) < 0.5) + +print("\n=== Training y = 2x + 1 with Adam ===") + +model = torch.nn.Linear(1, 1) +loss_fn = torch.nn.MSELoss() +opt = torch.optim.Adam(model.parameters(), lr=0.05) + +losses = [] +for epoch in range(200): + opt.zero_grad() + pred = model(xs) + loss = loss_fn(pred, ys) + loss.backward() + opt.step() + if epoch % 40 == 0: + losses.append(loss.item()) + print(f" epoch {epoch:3d} loss={loss:.6f}") + 
+print("Loss decreased:", losses[-1] < losses[0]) +print("> Adam loss decreased:", losses[-1] < losses[0]) +params = {name: p for name, p in model.named_parameters()} +w = params['weight'].item() +b = params['bias'].item() +print(f"Learned weight={w:.3f} (expected ~2), bias={b:.3f} (expected ~1)") + +print("\n=== Optimizer.zero_grad() resets gradients ===") +fc = torch.nn.Linear(2, 1) +opt = torch.optim.SGD(fc.parameters(), lr=0.1) + +x = torch.tensor([[1.0, 2.0]]) +fc(x).sum().backward() + +grads_before = [p.grad.tolist() for p in fc.parameters()] +print("Grads after first backward:", grads_before) +print("> grads exist after backward:", all(p.grad is not None for p in fc.parameters())) + +opt.zero_grad() +grads_after = [p.grad for p in fc.parameters()] +print("Grads after zero_grad (expected all None):", grads_after) +print("> grads None after zero_grad:", all(p.grad is None for p in fc.parameters())) + +print("\nAll training_sgd checks passed.") diff --git a/examples/pyodide/yarn.lock b/examples/pyodide/yarn.lock new file mode 100644 index 00000000..0c93a3a3 --- /dev/null +++ b/examples/pyodide/yarn.lock @@ -0,0 +1,53 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! + +__metadata: + version: 8 + cacheKey: 10c0 + +"@types/emscripten@npm:^1.41.4": + version: 1.41.5 + resolution: "@types/emscripten@npm:1.41.5" + checksum: 10c0/ae816da716f896434e59df7a71b67c71ae7e85ca067a32aef1616572fc4757459515d42ade6f5b8fd8d69733a9dbd0cf23010fec5b2f41ce52c09501aa350e45 + languageName: node + linkType: hard + +"example-pyodide@workspace:.": + version: 0.0.0-use.local + resolution: "example-pyodide@workspace:." 
+ dependencies: + pyodide: "npm:^0.29.3" + torch: "portal:../../" + languageName: unknown + linkType: soft + +"pyodide@npm:^0.29.3": + version: 0.29.3 + resolution: "pyodide@npm:0.29.3" + dependencies: + "@types/emscripten": "npm:^1.41.4" + ws: "npm:^8.5.0" + checksum: 10c0/4c8108e9af7cd8997812507a01c3dd48789ab58973bdef3ac5a336c38837e7495146195be7fb8b0798bcc3c4f79e98877efaf0672d18e088488512eefbf1d3ca + languageName: node + linkType: hard + +"torch@portal:../../::locator=example-pyodide%40workspace%3A.": + version: 0.0.0-use.local + resolution: "torch@portal:../../::locator=example-pyodide%40workspace%3A." + languageName: node + linkType: soft + +"ws@npm:^8.5.0": + version: 8.19.0 + resolution: "ws@npm:8.19.0" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: 10c0/4741d9b9bc3f9c791880882414f96e36b8b254e34d4b503279d6400d9a4b87a033834856dbdd94ee4b637944df17ea8afc4bce0ff4a1560d2166be8855da5b04 + languageName: node + linkType: hard diff --git a/package.json b/package.json new file mode 100644 index 00000000..bc0f1e53 --- /dev/null +++ b/package.json @@ -0,0 +1,84 @@ +{ + "type": "module", + "name": "torch", + "packageManager": "yarn@4.10.3", + "version": "0.1.0", + "description": "machine-learning libraries for Source Academy", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/source-academy/torch.git" + }, + "license": "Apache-2.0", + "files": [ + "build", + "src", + "examples" + ], + "main": "./build/node/torch.node.cjs", + "module": "./build/node/torch.node.es.mjs", + "types": "./build/types/index.d.ts", + "unpkg": "./build/torch.min.js", + "jsdelivr": "./build/torch.min.js", + "exports": { + ".": { + "torch-src": "./src/index.ts", + "types": "./build/types/index.d.ts", + "browser": "./build/browser/torch.browser.es.js", + "import": "./build/node/torch.node.es.mjs", + "require": "./build/node/torch.node.cjs" + } + }, + "mocha": 
{ + "require": "tsx" + }, + "scripts": { + "clean": "rimraf build", + "build:browser": "vite build --config vite.config.browser.ts", + "build:browser:watch": "vite build --config vite.config.browser.ts --watch", + "build:node": "vite build --config vite.config.node.ts", + "build:node:watch": "vite build --config vite.config.node.ts --watch", + "build:cdn": "vite build --config vite.config.cdn.ts", + "build:cdn:watch": "vite build --config vite.config.cdn.ts --watch", + "build:types": "tsc -p tsconfig.build.json", + "build": "yarn clean && concurrently \"yarn build:node\" \"yarn build:browser\" \"yarn build:cdn\" \"yarn build:types\"", + "dev": "concurrently \"yarn:build:browser:watch\" \"yarn:build:node:watch\" \"yarn:test:watch\" \"yarn:serve\"", + "docs": "typedoc --out docs src", + "lint": "eslint src", + "serve": "http-server -p 8080 -c-1", + "test": "mocha --node-option conditions=torch-src test/**/*.test.ts test/**/*.test.js", + "test:build": "mocha test/**/*.test.ts test/**/*.test.js", + "test:coverage": "c8 mocha --node-option conditions=torch-src test/**/*.test.ts test/**/*.test.js", + "test:watch": "mocha --node-option conditions=torch-src --watch test/**/*.test.ts test/**/*.test.js", + "update-tests": "python3 scripts/generate_tests.py > test/testcases.gen.js" + }, + "scriptsComments": { + "build": "Builds the library for browser and node.", + "dev": "Starts a dev session. 
Watch and build, test, and serve on localhost:8080", + "test": "Tests on code in src/", + "test:build": "Tests on built library in build/", + "test:coverage": "Provide test coverage", + "update-tests": "Updates test using scripts/generate_tests.py to test/testcases.gen.js" + }, + "devDependencies": { + "@eslint/js": "^9.37.0", + "@types/mocha": "^10.0.10", + "@types/node": "^24.6.2", + "c8": "^11.0.0", + "chai": "^6.2.0", + "concurrently": "^9.2.1", + "eslint": "^9.37.0", + "eslint-plugin-import": "^2.32.0", + "http-server": "^14.1.1", + "mocha": "^11.7.4", + "pkg-pr-new": "^0.0.62", + "prettier": "^3.6.2", + "rimraf": "^6.0.1", + "ts-node": "^10.9.2", + "tsx": "^4.20.6", + "typedoc": "^0.28.14", + "typescript": "^5.9.3", + "typescript-eslint": "^8.46.0", + "vite": "^7.1.9", + "vite-plugin-dts": "^4.5.4" + } +} diff --git a/scripts/.gitignore b/scripts/.gitignore new file mode 100644 index 00000000..e15106e3 --- /dev/null +++ b/scripts/.gitignore @@ -0,0 +1,216 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[codz] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py.cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +# Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +# poetry.lock +# poetry.toml + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control +# pdm.lock +# pdm.toml +.pdm-python +.pdm-build/ + +# pixi +# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. +# pixi.lock +# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one +# in the .venv directory. It is recommended not to include this directory in version control. +.pixi + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# Redis +*.rdb +*.aof +*.pid + +# RabbitMQ +mnesia/ +rabbitmq/ +rabbitmq-data/ + +# ActiveMQ +activemq-data/ + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +# .idea/ + +# Abstra +# Abstra is an AI-powered process automation framework. +# Ignore directories containing user credentials, local state, and settings. 
+# Learn more at https://abstra.io/docs +.abstra/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. However, if you prefer, +# you could uncomment the following to ignore the entire vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Marimo +marimo/_static/ +marimo/_lsp/ +__marimo__/ + +# Streamlit +.streamlit/secrets.toml diff --git a/scripts/generate_tests.py b/scripts/generate_tests.py new file mode 100644 index 00000000..2b9aaed4 --- /dev/null +++ b/scripts/generate_tests.py @@ -0,0 +1,47 @@ +import torch + +from generator.encoder import CompactJSONEncoder +from generator.unary import generate_unary_tests +from generator.binary import generate_binary_tests +from generator.broadcasting import generate_broadcasting_tests +from generator.matmul import generate_matmul_tests +from generator.reduction import generate_reduction_tests +from generator.linear import generate_linear_tests +from generator.optimizer import generate_optimizer_tests +from generator.expand import generate_expand_tests +from generator.conv import generate_conv_tests +from generator.export import generate_export_tests +from generator.loss import generate_loss_tests +from generator.activation import generate_activation_tests +from generator.cat import generate_cat_tests +from generator.softmax import generate_softmax_tests +from generator.clamp import generate_clamp_tests +from generator.maxpool import generate_maxpool_tests + +UNARY_OPS = ["log", "sqrt", "exp", "square", "abs", "sign", "neg", "reciprocal", "sin", "cos", "tan"] +BINARY_OPS = ["add", "sub", "mul", "div", "pow", "maximum", "minimum"] + +torch.manual_seed(42) + +if __name__ == "__main__": + suite = { + "unary": {op: generate_unary_tests(op) for op 
in UNARY_OPS}, + "binary": {op: generate_binary_tests(op) for op in BINARY_OPS}, + "broadcasting": generate_broadcasting_tests(), + "matmul": generate_matmul_tests(), + "reductions": generate_reduction_tests(), + "linear": generate_linear_tests(), + "optimizers": generate_optimizer_tests(), + "expand": generate_expand_tests(), + "conv": generate_conv_tests(), + "export": generate_export_tests(), + "loss": generate_loss_tests(), + "activations": generate_activation_tests(), + "cat": generate_cat_tests(), + "softmax": generate_softmax_tests(), + "clamp": generate_clamp_tests(), + "maxpool": generate_maxpool_tests(), + } + + print("export const testData = ", end="") + print(CompactJSONEncoder(indent=2).encode(suite), end=";\n") diff --git a/scripts/generator/__init__.py b/scripts/generator/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/scripts/generator/activation.py b/scripts/generator/activation.py new file mode 100644 index 00000000..a9492921 --- /dev/null +++ b/scripts/generator/activation.py @@ -0,0 +1,39 @@ +import torch +import torch.nn as nn + + +def generate_activation_tests(): + tests = [] + + cases = [ + # (ActivationClass, test_name, kwargs) + (nn.ReLU, "relu_1d", (5,), {}), + (nn.ReLU, "relu_2d", (3, 4), {}), + (nn.ReLU, "relu_3d", (2, 3, 4), {}), + (nn.Sigmoid, "sigmoid_1d", (5,), {}), + (nn.Sigmoid, "sigmoid_2d", (3, 4), {}), + (nn.Sigmoid, "sigmoid_3d", (2, 3, 4), {}), + (nn.LeakyReLU, "leaky_relu_default", (5,), {}), + (nn.LeakyReLU, "leaky_relu_slope_0_2", (3, 4), {"negative_slope": 0.2}), + (nn.LeakyReLU, "leaky_relu_3d", (2, 3, 4), {"negative_slope": 0.1}), + ] + + for ActivationClass, desc, input_shape, kwargs in cases: + input = torch.randn(*input_shape, requires_grad=True) + + activation = ActivationClass(**kwargs) + output = activation(input) + output.sum().backward() + + entry = { + "test_name": desc, + "activation_type": ActivationClass.__name__, + "input": input.detach().numpy().tolist(), + "expected_output": 
output.detach().numpy().tolist(), + "expected_grad_input": input.grad.numpy().tolist(), + } + if kwargs: + entry["kwargs"] = kwargs + tests.append(entry) + + return tests diff --git a/scripts/generator/binary.py b/scripts/generator/binary.py new file mode 100644 index 00000000..b742ef56 --- /dev/null +++ b/scripts/generator/binary.py @@ -0,0 +1,30 @@ +import torch + + +def generate_binary_tests(op_name, num_tests=3): + tests = [] + for i in range(num_tests): + x = torch.randn(4, 4, requires_grad=True) + y = torch.randn(4, 4, requires_grad=True) + with torch.no_grad(): + x[0:2, 0] = 0 + x[0:2, 1] = 1 + x[0:2, 2] = -1 + y[0, 0:2] = 0 + y[1, 0:2] = 1 + y[2, 0:2] = -1 + + torch_op = getattr(torch, op_name) + out = torch_op(x, y) + out.sum().backward() + + tests.append( + { + "input_x": x.detach().numpy().tolist(), + "input_y": y.detach().numpy().tolist(), + "expected_output": out.detach().numpy().tolist(), + "expected_grad_x": x.grad.numpy().tolist(), + "expected_grad_y": y.grad.numpy().tolist(), + } + ) + return tests diff --git a/scripts/generator/broadcasting.py b/scripts/generator/broadcasting.py new file mode 100644 index 00000000..da21d188 --- /dev/null +++ b/scripts/generator/broadcasting.py @@ -0,0 +1,41 @@ +import torch + + +def generate_broadcasting_tests(): + shape_pairs = [ + ((2, 3), (2, 3)), + ((), (2, 2)), + ((2, 3, 4, 1), (3, 1, 1)), + ((1,), (3, 1, 2)), + ((5, 1, 4, 1), (3, 1, 1)), + ] + + ops = ["add", "mul"] + tests = [] + + for shape_x, shape_y in shape_pairs: + for op_name in ops: + x = torch.randn(shape_x, requires_grad=True) if shape_x != () else torch.tensor(1.5, requires_grad=True) + y = torch.randn(shape_y, requires_grad=True) if shape_y != () else torch.tensor(-0.5, requires_grad=True) + + torch_op = getattr(torch, op_name) + out = torch_op(x, y) + + out.sum().backward() + + sx_str = "scalar" if shape_x == () else str(shape_x) + sy_str = "scalar" if shape_y == () else str(shape_y) + + tests.append( + { + "test_name": 
f"broadcast_{op_name}_{sx_str}_and_{sy_str}", + "op_name": op_name, + "input_x": x.detach().numpy().tolist(), + "input_y": y.detach().numpy().tolist(), + "expected_output": out.detach().numpy().tolist(), + "expected_grad_x": x.grad.numpy().tolist() if x.grad is not None else 0.0, + "expected_grad_y": y.grad.numpy().tolist() if y.grad is not None else 0.0, + } + ) + + return tests diff --git a/scripts/generator/cat.py b/scripts/generator/cat.py new file mode 100644 index 00000000..cb29ccbf --- /dev/null +++ b/scripts/generator/cat.py @@ -0,0 +1,34 @@ +import torch + + +def generate_cat_tests(): + tests = [] + + # (list_of_shapes, dim) + configs = [ + ([(3,), (4,)], 0), # 1D, two tensors + ([(3,), (4,), (2,)], 0), # 1D, three tensors + ([(2, 3), (4, 3)], 0), # 2D, along dim 0 + ([(2, 3), (2, 4)], 1), # 2D, along dim 1 + ([(2, 3, 4), (5, 3, 4)], 0), # 3D, along dim 0 + ([(2, 3, 4), (2, 1, 4)], 1), # 3D, along dim 1 + ([(2, 3, 4), (2, 3, 2)], 2), # 3D, along dim 2 + ([(2, 3, 4), (2, 3, 4)], -1), # 3D, negative dim + ([(2, 3), (2, 3), (2, 3)], 0), # three equal-shape tensors + ] + + for shapes, dim in configs: + tensors = [torch.randn(*s, requires_grad=True) for s in shapes] + out = torch.cat(tensors, dim=dim) + out.sum().backward() + + name_parts = "_".join("x".join(str(d) for d in s) for s in shapes) + tests.append({ + "test_name": f"cat_dim{dim}_{name_parts}", + "inputs": [t.detach().numpy().tolist() for t in tensors], + "dim": dim, + "expected_output": out.detach().numpy().tolist(), + "expected_grads": [t.grad.numpy().tolist() for t in tensors], + }) + + return tests diff --git a/scripts/generator/clamp.py b/scripts/generator/clamp.py new file mode 100644 index 00000000..cea7611c --- /dev/null +++ b/scripts/generator/clamp.py @@ -0,0 +1,30 @@ +import torch + + +def generate_clamp_tests(): + tests = [] + + cases = [ + ("clamp_1d", (5,), -0.5, 0.5), + ("clamp_2d", (3, 4), 0.0, 1.0), + ("clamp_3d", (2, 3, 4), -1.0, 1.0), + ("clamp_negative_range", (4,), -2.0, -0.5), + 
] + + for desc, shape, min_val, max_val in cases: + x = torch.randn(*shape, requires_grad=True) + y = torch.clamp(x, min=min_val, max=max_val) + y.sum().backward() + + tests.append( + { + "test_name": desc, + "min": min_val, + "max": max_val, + "input": x.detach().numpy().tolist(), + "expected_output": y.detach().numpy().tolist(), + "expected_grad": x.grad.numpy().tolist(), + } + ) + + return tests diff --git a/scripts/generator/conv.py b/scripts/generator/conv.py new file mode 100644 index 00000000..4e4951de --- /dev/null +++ b/scripts/generator/conv.py @@ -0,0 +1,91 @@ +import torch + + +def generate_conv_tests(): + tests = [] + cases = [ + ( + "Conv1d", + torch.nn.Conv1d, + [ + (1, 1, 3, 1, 0, 1, 1, True, (1, 1, 5), "basic"), + (2, 3, 2, 2, 1, 1, 1, True, (2, 2, 6), "stride_padding"), + (2, 2, 2, 1, 0, 2, 1, False, (1, 2, 5), "dilation_no_bias"), + (4, 4, 3, 1, 1, 1, 2, True, (1, 4, 6), "groups"), + ], + ), + ( + "Conv2d", + torch.nn.Conv2d, + [ + (1, 1, 3, 1, 0, 1, 1, True, (1, 1, 5, 5), "basic"), + (2, 3, 2, 2, 1, 1, 1, True, (2, 2, 6, 6), "stride_padding"), + (2, 2, 2, 1, 0, 2, 1, False, (1, 2, 5, 5), "dilation_no_bias"), + (4, 4, 3, 1, 1, 1, 2, True, (1, 4, 4, 4), "groups"), + ], + ), + ( + "Conv3d", + torch.nn.Conv3d, + [ + (1, 1, 3, 1, 0, 1, 1, True, (1, 1, 5, 5, 5), "basic"), + (2, 3, 2, 2, 1, 1, 1, True, (2, 2, 4, 4, 4), "stride_padding"), + (2, 2, 2, 1, 0, 2, 1, False, (1, 2, 5, 5, 5), "dilation_no_bias"), + (4, 4, 3, 1, 1, 1, 2, True, (1, 4, 4, 4, 4), "groups"), + ], + ), + ] + for conv_type, conv_class, conv_cases in cases: + for ( + in_channels, + out_channels, + kernel_size, + stride, + padding, + dilation, + groups, + bias, + input_shape, + desc, + ) in conv_cases: + layer = conv_class( + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=bias, + ) + x = torch.randn(*input_shape, requires_grad=True) + + out = layer(x) + out.sum().backward() + + test_data = { + "test_name": 
f"{conv_type.lower()}_{desc}", + "conv_type": conv_type, + "in_channels": in_channels, + "out_channels": out_channels, + "kernel_size": kernel_size, + "stride": stride, + "padding": padding, + "dilation": dilation, + "groups": groups, + "has_bias": bias, + "input": x.detach().numpy().tolist(), + "weight": layer.weight.detach().numpy().tolist(), + "expected_output": out.detach().numpy().tolist(), + "expected_grad_input": x.grad.numpy().tolist(), + "expected_grad_weight": layer.weight.grad.numpy().tolist(), + } + if bias: + test_data["bias"] = layer.bias.detach().numpy().tolist() + test_data["expected_grad_bias"] = layer.bias.grad.numpy().tolist() + else: + test_data["bias"] = None + test_data["expected_grad_bias"] = None + + tests.append(test_data) + return tests diff --git a/scripts/generator/encoder.py b/scripts/generator/encoder.py new file mode 100644 index 00000000..482e1ff8 --- /dev/null +++ b/scripts/generator/encoder.py @@ -0,0 +1,39 @@ +import json + + +class CompactJSONEncoder(json.JSONEncoder): + """JSON encoder that puts leaf arrays (arrays of non-list items) on single lines.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._indent = kwargs.get("indent", 2) + + def encode(self, o): + return self._encode(o, 0) + + def _encode(self, o, level): + indent_str = " " * (self._indent * level) + next_indent_str = " " * (self._indent * (level + 1)) + + if isinstance(o, dict): + if not o: + return "{}" + items = [] + for k, v in o.items(): + key = json.dumps(k) + val = self._encode(v, level + 1) + items.append(f"{next_indent_str}{key}: {val}") + return "{\n" + ",\n".join(items) + f"\n{indent_str}}}" + + elif isinstance(o, list): + if not o: + return "[]" + # Leaf array: no nested lists inside + if not any(isinstance(item, (list, dict)) for item in o): + return "[" + ", ".join(self._encode(v, level) for v in o) + "]" + # Nested array: pretty print + items = [f"{next_indent_str}{self._encode(v, level + 1)}" for v in o] + return 
"[\n" + ",\n".join(items) + f"\n{indent_str}]" + + else: + return json.dumps(o) diff --git a/scripts/generator/expand.py b/scripts/generator/expand.py new file mode 100644 index 00000000..459e3b25 --- /dev/null +++ b/scripts/generator/expand.py @@ -0,0 +1,32 @@ +import torch + + +def generate_expand_tests(): + tests = [] + + # Tuples of (initial_shape, expand_shape, description) + cases = [ + ((1,), (3,), "1D_expand"), + ((3,), (2, 3), "prepend_2D"), + ((1, 3), (4, 3), "expand_dim_0"), + ((2, 1, 4), (2, 5, 4), "expand_middle_dim"), + ((1, 3, 1), (2, -1, 4), "preserve_with_negative_one"), + ] + + for initial_shape, expand_shape, desc in cases: + x = torch.randn(initial_shape, requires_grad=True) + + out = x.expand(*expand_shape) + out.sum().backward() + + tests.append( + { + "test_name": f"expand_{desc}", + "input": x.detach().numpy().tolist(), + "expand_shape": list(expand_shape), + "expected_output": out.detach().numpy().tolist(), + "expected_grad": x.grad.numpy().tolist(), + } + ) + + return tests diff --git a/scripts/generator/export.py b/scripts/generator/export.py new file mode 100644 index 00000000..8e32f041 --- /dev/null +++ b/scripts/generator/export.py @@ -0,0 +1,93 @@ +import torch + + +def generate_export_tests(): + tests = [] + + def extract_nodes(ep): + nodes = [] + for node in ep.graph.nodes: + nd = { + "op": node.op, + "name": node.name, + "target": str(node.target), + } + nd["args"] = [] + for a in node.args: + if isinstance(a, torch.fx.Node): + nd["args"].append(a.name) + elif isinstance(a, tuple): + nd["args"].append([n.name if isinstance(n, torch.fx.Node) else str(n) for n in a]) + else: + nd["args"].append(str(a)) + if "val" in node.meta: + v = node.meta["val"] + if isinstance(v, torch.Tensor): + nd["val_shape"] = list(v.shape) + nodes.append(nd) + return nodes + + def extract_specs(ep): + sig = ep.graph_signature + input_specs = [ + {"kind": s.kind.name, "name": s.arg.name if hasattr(s.arg, "name") else str(s.arg)} for s in sig.input_specs + ] 
+ output_specs = [ + {"kind": s.kind.name, "name": s.arg.name if hasattr(s.arg, "name") else str(s.arg)} + for s in sig.output_specs + ] + return input_specs, output_specs + + # Test 1: Simple Sequential(Linear, ReLU) + torch.manual_seed(42) + model1 = torch.nn.Sequential(torch.nn.Linear(3, 2), torch.nn.ReLU()) + x1 = torch.randn(2, 3) + ep1 = torch.export.export(model1, (x1,)) + + nodes1 = extract_nodes(ep1) + input_specs1, output_specs1 = extract_specs(ep1) + + tests.append( + { + "test_name": "linear_relu", + "model_type": "LinearReLU", + "input": x1.detach().numpy().tolist(), + "weight": model1[0].weight.detach().numpy().tolist(), + "bias": model1[0].bias.detach().numpy().tolist(), + "in_features": 3, + "out_features": 2, + "expected_nodes": nodes1, + "expected_input_specs": input_specs1, + "expected_output_specs": output_specs1, + } + ) + + # Test 2: Sequential(Linear, ReLU, Linear, Sigmoid) + torch.manual_seed(42) + model2 = torch.nn.Sequential(torch.nn.Linear(4, 3), torch.nn.ReLU(), torch.nn.Linear(3, 2), torch.nn.Sigmoid()) + x2 = torch.randn(2, 4) + ep2 = torch.export.export(model2, (x2,)) + + nodes2 = extract_nodes(ep2) + input_specs2, output_specs2 = extract_specs(ep2) + + tests.append( + { + "test_name": "two_layer", + "model_type": "TwoLayer", + "input": x2.detach().numpy().tolist(), + "linear1_weight": model2[0].weight.detach().numpy().tolist(), + "linear1_bias": model2[0].bias.detach().numpy().tolist(), + "linear2_weight": model2[2].weight.detach().numpy().tolist(), + "linear2_bias": model2[2].bias.detach().numpy().tolist(), + "linear1_in": 4, + "linear1_out": 3, + "linear2_in": 3, + "linear2_out": 2, + "expected_nodes": nodes2, + "expected_input_specs": input_specs2, + "expected_output_specs": output_specs2, + } + ) + + return tests diff --git a/scripts/generator/linear.py b/scripts/generator/linear.py new file mode 100644 index 00000000..10e494b3 --- /dev/null +++ b/scripts/generator/linear.py @@ -0,0 +1,30 @@ +import torch + + +def 
def generate_loss_tests():
    """Generate reference fixtures for loss modules (forward value + input grad).

    Covers the element-wise losses (MSE, L1, BCE) over several shapes, plus
    CrossEntropyLoss (raw logits) and NLLLoss (log-probabilities), both with
    integer class-index targets.
    """
    # Hoisted: previously this was re-imported on every NLL loop iteration.
    import torch.nn.functional as F

    tests = []

    cases = [
        # (LossClass, input_shape, target_shape, test_name, needs_sigmoid)
        (nn.MSELoss, (5,), (5,), "mse_1d", False),
        (nn.MSELoss, (3, 4), (3, 4), "mse_2d", False),
        (nn.MSELoss, (2, 3, 4), (2, 3, 4), "mse_3d", False),
        (nn.L1Loss, (5,), (5,), "l1_1d", False),
        (nn.L1Loss, (3, 4), (3, 4), "l1_2d", False),
        (nn.L1Loss, (2, 3, 4), (2, 3, 4), "l1_3d", False),
        # BCE requires inputs in (0, 1)
        (nn.BCELoss, (5,), (5,), "bce_1d", True),
        (nn.BCELoss, (3, 4), (3, 4), "bce_2d", True),
    ]

    for LossClass, input_shape, target_shape, desc, needs_sigmoid in cases:
        # `inp` rather than `input` to avoid shadowing the builtin.
        if needs_sigmoid:
            # Squash predictions into (0, 1); BCE targets live in [0, 1].
            inp = torch.sigmoid(torch.randn(*input_shape)).requires_grad_(True)
            target = torch.rand(*target_shape)
        else:
            inp = torch.randn(*input_shape, requires_grad=True)
            target = torch.randn(*target_shape)

        loss_fn = LossClass()
        output = loss_fn(inp, target)
        output.backward()

        tests.append(
            {
                "test_name": desc,
                "loss_type": LossClass.__name__,
                "input": inp.detach().numpy().tolist(),
                "target": target.detach().numpy().tolist(),
                "expected_output": output.detach().numpy().tolist(),
                "expected_grad_input": inp.grad.numpy().tolist(),
            }
        )

    # CrossEntropyLoss: input is (N, C) logits, target is (N,) class indices
    ce_cases = [
        ((3, 5), "ce_3x5"),
        ((4, 3), "ce_4x3"),
        ((2, 10), "ce_2x10"),
    ]

    for (N, C), desc in ce_cases:
        inp = torch.randn(N, C, requires_grad=True)
        target = torch.randint(0, C, (N,))

        loss_fn = nn.CrossEntropyLoss()
        output = loss_fn(inp, target)
        output.backward()

        tests.append(
            {
                "test_name": desc,
                "loss_type": "CrossEntropyLoss",
                "input": inp.detach().numpy().tolist(),
                "target": target.numpy().tolist(),
                "expected_output": output.detach().numpy().tolist(),
                "expected_grad_input": inp.grad.numpy().tolist(),
            }
        )

    # NLLLoss: input is (N, C) log-probs, target is (N,) class indices
    nll_cases = [
        ((3, 5), "nll_3x5"),
        ((4, 3), "nll_4x3"),
        ((2, 10), "nll_2x10"),
    ]

    for (N, C), desc in nll_cases:
        # Produce proper log-probabilities via log_softmax, then detach so the
        # recorded gradient is w.r.t. the log-probs themselves (not the logits).
        raw = torch.randn(N, C, requires_grad=True)
        inp = F.log_softmax(raw, dim=1)
        inp = inp.detach().requires_grad_(True)
        target = torch.randint(0, C, (N,))

        loss_fn = nn.NLLLoss()
        output = loss_fn(inp, target)
        output.backward()

        tests.append(
            {
                "test_name": desc,
                "loss_type": "NLLLoss",
                "input": inp.detach().numpy().tolist(),
                "target": target.numpy().tolist(),
                "expected_output": output.detach().numpy().tolist(),
                "expected_grad_input": inp.grad.numpy().tolist(),
            }
        )

    return tests
def generate_maxpool_tests():
    """Generate reference fixtures for nn.MaxPool2d forward and backward."""
    # (label, input_shape, kernel_size, stride (None -> default), padding)
    configs = [
        ("maxpool_basic_2x2", (1, 1, 4, 4), 2, None, 0),
        ("maxpool_stride_1", (1, 2, 4, 4), 2, 1, 0),
        ("maxpool_padding_1", (1, 1, 4, 4), 3, 1, 1),
        ("maxpool_multichannel", (2, 3, 6, 6), 2, 2, 0),
        ("maxpool_3d_input", (1, 4, 4), 2, None, 0),  # unbatched (C, H, W)
    ]

    fixtures = []
    for label, in_shape, kernel, stride, pad in configs:
        data = torch.randn(*in_shape, requires_grad=True)

        pool_kwargs = {"padding": pad}
        if stride is not None:
            pool_kwargs["stride"] = stride
        pooled = nn.MaxPool2d(kernel, **pool_kwargs)(data)
        pooled.sum().backward()

        fixtures.append(
            {
                "test_name": label,
                "kernel_size": kernel,
                # PyTorch defaults stride to kernel_size when omitted.
                "stride": kernel if stride is None else stride,
                "padding": pad,
                "input": data.detach().numpy().tolist(),
                "expected_output": pooled.detach().numpy().tolist(),
                "expected_grad": data.grad.numpy().tolist(),
            }
        )

    return fixtures
b/scripts/generator/optimizer.py new file mode 100644 index 00000000..0126fa54 --- /dev/null +++ b/scripts/generator/optimizer.py @@ -0,0 +1,54 @@ +import torch + + +def generate_optimizer_tests(): + tests = [] + configs = [ + ("SGD_basic", torch.optim.SGD, {"lr": 0.1}), + ("SGD_momentum", torch.optim.SGD, {"lr": 0.1, "momentum": 0.9}), + ("SGD_weight_decay", torch.optim.SGD, {"lr": 0.1, "weight_decay": 0.01}), + ("SGD_nesterov", torch.optim.SGD, {"lr": 0.1, "momentum": 0.9, "nesterov": True}), + ("Adam_basic", torch.optim.Adam, {"lr": 0.1}), + ("Adam_custom_betas", torch.optim.Adam, {"lr": 0.1, "betas": (0.95, 0.999)}), + ("Adam_weight_decay", torch.optim.Adam, {"lr": 0.1, "weight_decay": 0.01}), + ("Adam_amsgrad", torch.optim.Adam, {"lr": 0.1, "amsgrad": True}), + ("Adagrad_basic", torch.optim.Adagrad, {"lr": 0.1}), + ("Adagrad_lr_decay", torch.optim.Adagrad, {"lr": 0.1, "lr_decay": 0.01}), + ("Adagrad_weight_decay", torch.optim.Adagrad, {"lr": 0.1, "weight_decay": 0.01}), + ] + + for test_name, optim_class, kwargs in configs: + # 1. Initialize a generic parameter tensor + w = torch.randn(3, 3, requires_grad=True) + x = torch.randn(3, 3) # Dummy input to calculate a loss + + # Save exact starting state + initial_w = w.detach().clone() + + # 2. Setup Optimizer + optimizer = optim_class([w], **kwargs) + optimizer.zero_grad() + + # 3. Compute loss and gradients + loss = (w * x).sum() + loss.backward() + + # Save exact gradient computed by PyTorch + expected_grad = w.grad.detach().clone() + + # 4. 
def generate_reduction_tests():
    """Generate reference fixtures for reduction ops (sum/mean/max/min).

    Sweeps every op over dim in {None, 0, 1, -1} and keepdim in
    {False, True}. The (dim=None, keepdim=True) pairing is skipped because a
    full reduction ignores keepdim.
    """
    tests = []
    ops = ["sum", "mean", "max", "min"]
    dims = [None, 0, 1, -1]
    keepdims = [False, True]

    for op in ops:
        for dim in dims:
            for keepdim in keepdims:
                if dim is None and keepdim:
                    continue

                x = torch.randn(3, 4, 5, requires_grad=True)
                torch_op = getattr(torch, op)

                # Every remaining combination is valid, so no exception
                # handling is needed here. (The previous broad
                # `except Exception: pass` silently dropped fixtures and
                # would have masked real generator bugs.)
                if dim is None:
                    y = torch_op(x)
                else:
                    out = torch_op(x, dim=dim, keepdim=keepdim)
                    # max/min with dim return (values, indices); gradients
                    # flow through the values only.
                    y = out.values if op in ["max", "min"] else out

                y.sum().backward()

                tests.append(
                    {
                        "test_name": f"{op}_dim_{dim}_keepdim_{keepdim}",
                        "op_name": op,
                        "dim": dim,
                        "keepdim": keepdim,
                        "input": x.detach().numpy().tolist(),
                        "expected_output": y.detach().numpy().tolist(),
                        "expected_grad": x.grad.numpy().tolist(),
                    }
                )
    return tests
def generate_unary_tests(op_name, num_tests=3):
    """Generate fixtures for an element-wise unary torch op.

    Each fixture pins 0, 1, and -1 into the first row so boundary behavior
    (e.g. abs/sign at zero) is always exercised alongside random values.
    """
    torch_op = getattr(torch, op_name)

    fixtures = []
    for _ in range(num_tests):
        x = torch.randn(3, 3, requires_grad=True)
        # Overwrite in no-grad mode so the pins don't join the autograd graph.
        with torch.no_grad():
            x[0, 0] = 0
            x[0, 1] = 1
            x[0, 2] = -1

        result = torch_op(x)
        result.sum().backward()

        fixtures.append(
            {
                "input": x.detach().numpy().tolist(),
                "expected_output": result.detach().numpy().tolist(),
                "expected_grad": x.grad.numpy().tolist(),
            }
        )

    return fixtures
padded_a_shape[i] !== padded_b_shape[i] && + padded_a_shape[i] !== 1 && + padded_b_shape[i] !== 1 + ) { + throw new Error(`Shape mismatch: ${a_shape} and ${b_shape}`); + } + + result_shape.push(Math.max(padded_a_shape[i], padded_b_shape[i])); + } + + return result_shape; +} + +export function _unbroadcast(result_shape: number[], original_shape: number[], result: number[]) { + const this_shape = _pad_shape(original_shape, result_shape) + const unbroadcasted_result = new Array(original_shape.reduce((acc, cur) => acc * cur, 1)).fill(0); + for (let i = 0; i < result.length; i++) { + unbroadcasted_result[_get_original_index(this_shape, result_shape, i)] += result[i]; + } + return unbroadcasted_result; +} + +export function _pad_shape(shape: number[], broadcast_shape: number[]): number[] { + if (shape.length >= broadcast_shape.length) { + return shape; + } + + return [...Array(broadcast_shape.length - shape.length).fill(1), ...shape]; +} + +export function _get_original_index( + original_shape: number[], + new_shape: number[], + index: number +): number { + let original_index = 0; + let cur_stride = 1; + let temp_index = index; + + for (let i = original_shape.length - 1; i >= 0; i--) { + if (original_shape[i] > 1) { + const dim_index = temp_index % new_shape[i]; + original_index = original_index + dim_index * cur_stride; + } + cur_stride *= original_shape[i]; + temp_index = Math.floor(temp_index / new_shape[i]); + } + return original_index; +} diff --git a/src/creation/index.ts b/src/creation/index.ts new file mode 100644 index 00000000..8eb4eb86 --- /dev/null +++ b/src/creation/index.ts @@ -0,0 +1,3 @@ +export * from './rand'; +export * from './initializers'; +export * from './ranges'; diff --git a/src/creation/initializers.ts b/src/creation/initializers.ts new file mode 100644 index 00000000..153f64c5 --- /dev/null +++ b/src/creation/initializers.ts @@ -0,0 +1,40 @@ +import { Tensor, NestedNumberArray } from '../tensor'; +import { _get_shape_from_args, _numel } from 
'../util'; + +export function tensor(data: NestedNumberArray, requires_grad: boolean = false): Tensor { + return new Tensor(data, { requires_grad }); +} + +export function full(shape: number[], fill_value: number): Tensor { + const t = new Tensor(Array(_numel(shape)).fill(fill_value)); + t.shape = shape; + return t; +} + +export function zeros(...args: number[] | number[][]): Tensor { + return full(_get_shape_from_args(args), 0); +} + +export function ones(...args: number[] | number[][]): Tensor { + return full(_get_shape_from_args(args), 1); +} + +export function empty(...args: number[] | number[][]): Tensor { + return full(_get_shape_from_args(args), 0); +} + +export function full_like(input: Tensor, fill_value: number): Tensor { + return full(input.shape, fill_value); +} + +export function zeros_like(input: Tensor): Tensor { + return full(input.shape, 0); +} + +export function ones_like(input: Tensor): Tensor { + return full(input.shape, 1); +} + +export function empty_like(input: Tensor): Tensor { + return full(input.shape, 0); +} diff --git a/src/creation/rand.ts b/src/creation/rand.ts new file mode 100644 index 00000000..98f14f3a --- /dev/null +++ b/src/creation/rand.ts @@ -0,0 +1,47 @@ +import { Tensor } from '../tensor'; +import { _get_shape_from_args, _numel } from '../util'; +import { uniformDist, normalDist } from '../prng'; + +export function randn(...args: number[] | number[][]): Tensor { + const shape = _get_shape_from_args(args); + const tensor = new Tensor(Array.from({ length: _numel(shape) }, normalDist())); + tensor.shape = shape; + return tensor; +} + +export function rand(...args: number[] | number[][]): Tensor { + const shape = _get_shape_from_args(args); + const tensor = new Tensor(Array.from({ length: _numel(shape) }, uniformDist())); + tensor.shape = shape; + return tensor; +} + +export function randint(low: number, high: number, shape: number[]): Tensor { + const tensor = new Tensor( + Array.from({ length: _numel(shape) }, () => 
Math.floor(uniformDist(low, high)())) + ); + tensor.shape = shape; + return tensor; +} + +export function randperm(n: number): Tensor { + const arr = Array.from({ length: n }, (_, i) => i); + for (let i = 0; i < n; i++) { + const j = Math.floor(uniformDist()() * (n - i)) + i; + [arr[i], arr[j]] = [arr[j], arr[i]]; + } + const tensor = new Tensor(arr); + return tensor; +} + +export function rand_like(input: Tensor): Tensor { + return rand(input.shape); +} + +export function randn_like(input: Tensor): Tensor { + return randn(input.shape); +} + +export function randint_like(input: Tensor, low: number, high: number): Tensor { + return randint(low, high, input.shape); +} diff --git a/src/creation/ranges.ts b/src/creation/ranges.ts new file mode 100644 index 00000000..812c555a --- /dev/null +++ b/src/creation/ranges.ts @@ -0,0 +1,29 @@ +import { Tensor } from '../tensor'; + +export function linspace(start: number, end: number, steps: number) { + const data = []; + const step = (end - start) / (steps - 1); + for (let i = 0; i < steps - 1; i++) { + data.push(start + i * step); + } + data.push(end); + return new Tensor(data); +} + +export function arange(start: number, end: number = undefined, step: number = 1) { + const data = []; + if (end === undefined) { + end = start; + start = 0; + } + if (step === 0) { + throw new Error('step must be nonzero'); + } + if (Math.sign(end - start) !== Math.sign(step)) { + throw new Error('upper bound and lower bound inconsistent with step sign'); + } + for (let i = start; i < end; i += step) { + data.push(i); + } + return new Tensor(data); +} diff --git a/src/export.ts b/src/export.ts new file mode 100644 index 00000000..045a2982 --- /dev/null +++ b/src/export.ts @@ -0,0 +1,319 @@ +import { Tensor } from './tensor'; +import { TorchFunction } from './functions/base'; +import { Module } from './nn/base'; +import { no_grad } from './grad_mode'; +import { eventBus, events } from './util'; + +/** + * A graph node in the exported program, 
/**
 * A graph node in the exported program, matching PyTorch's FX node format.
 * One node exists per placeholder (parameter or user input), per traced
 * operation, and for the final output.
 */
export interface GraphNode {
  /** Node type: 'placeholder' for inputs/params, 'call_function' for ops, 'output' for result */
  op: 'placeholder' | 'call_function' | 'output';
  /** Unique name for this node (e.g. "add", "linear_1") */
  name: string;
  /** Operation target (e.g. "aten.add.default") */
  target: string;
  /** References to input node names */
  args: (string | string[])[];
  /** Output tensor shape, if available */
  val_shape?: number[];
}

/** Describes one graph input: a module parameter or a user-supplied tensor. */
export interface InputSpec {
  kind: 'PARAMETER' | 'USER_INPUT';
  name: string;
  /** For PARAMETER inputs: the dotted parameter path (e.g. "linear.weight"). */
  target?: string;
}

/** Describes one graph output returned to the caller. */
export interface OutputSpec {
  kind: 'USER_OUTPUT';
  name: string;
}

/** Input/output signature of the exported graph, mirroring torch.export. */
export interface GraphSignature {
  input_specs: InputSpec[];
  output_specs: OutputSpec[];
}
+ 'transpose': 'aten.transpose.int', + 'matmul': 'aten.matmul.default', + 'relu': 'aten.relu.default', + 'sigmoid': 'aten.sigmoid.default', + 'lt': 'aten.lt.Tensor', + 'gt': 'aten.gt.Tensor', + 'le': 'aten.le.Tensor', + 'ge': 'aten.ge.Tensor', + 'eq': 'aten.eq.Tensor', + 'ne': 'aten.ne.Tensor', + 'conv1d': 'aten.conv1d.default', + 'conv2d': 'aten.conv2d.default', + 'conv3d': 'aten.conv3d.default', + 'linear': 'aten.linear.default', + 'cross_entropy_loss': 'aten.cross_entropy_loss.default', + 'nll_loss': 'aten.nll_loss_forward.default', + 'cat': 'aten.cat.default', + 'softmax': 'aten._softmax.default', + 'clamp': 'aten.clamp.default', + 'leaky_relu': 'aten.leaky_relu.default', + 'max_pool2d': 'aten.max_pool2d.default', +}; + +/** + * Maps our internal op names to PyTorch's aten operator names with default value. + */ +function toAtenTarget(opName: string): string { + return _atenMap[opName] || `aten.${opName}.default`; +} + +/** + * Manages unique node name generation with PyTorch-style deduplication. + * E.g. first "add" -> "add", second "add" -> "add_1" + */ +class NameGenerator { + private counts = new Map(); + + generate(baseName: string): string { + const count = this.counts.get(baseName) || 0; + this.counts.set(baseName, count + 1); + return count === 0 ? baseName : `${baseName}_${count}`; + } +} + +/** + * An exported program, matching PyTorch's ExportedProgram structure. + */ +export class ExportedProgram { + constructor( + public graph: GraphNode[], + public graph_signature: GraphSignature, + public parameters: Map + ) { } + + toString(): string { + const lines: string[] = ['ExportedProgram:']; + + // Format forward signature + const inputArgs = this.graph + .filter(n => n.op === 'placeholder') + .map(n => { + const shape = n.val_shape ? 
JSON.stringify(n.val_shape) : '?'; + return `${n.name}: "${shape}"`; + }) + .join(', '); + lines.push(` class GraphModule(torch.nn.Module):`); + lines.push(` def forward(self, ${inputArgs}):`); + + // Operations + for (const node of this.graph) { + if (node.op === 'call_function') { + const args = node.args.join(', '); + lines.push(` ${node.name} = ${node.target}(${args})`); + } else if (node.op === 'output') { + lines.push(` return (${node.args.join(', ')},)`); + } + } + + lines.push(''); + lines.push('Graph signature:'); + lines.push(' # inputs'); + for (const spec of this.graph_signature.input_specs) { + const target = spec.target ? ` target='${spec.target}'` : ''; + lines.push(` ${spec.name}: ${spec.kind}${target}`); + } + lines.push(' # outputs'); + for (const spec of this.graph_signature.output_specs) { + lines.push(` ${spec.name}: ${spec.kind}`); + } + + return lines.join('\n'); + } +} + +/** + * Export a module's forward pass as an ExportedProgram. + * + * This traces the module's forward() with the given sample inputs + * and captures the computation graph. Similar to PyTorch's torch.export.export(). + * + * Named `export_` to avoid conflict with the JavaScript `export` keyword. + * + * @param module The nn.Module to export + * @param sampleInputs Sample input tensors for tracing + * @returns An ExportedProgram containing the traced graph + */ +export function export_( + module: Module, + sampleInputs: Tensor[] +): ExportedProgram { + const graph: GraphNode[] = []; + const nameGen = new NameGenerator(); + + // Map tensor IDs to their graph node names + const tensorIdToName = new Map(); + + // 1. 
Create placeholder nodes for parameters + const namedParams = module.named_parameters(); + const paramTensorIds = new Set(); + const inputSpecs: InputSpec[] = []; + + for (const [paramPath, param] of namedParams) { + // Convert "linear.weight" -> "p_linear_weight" (PyTorch convention) + const placeholderName = 'p_' + paramPath.replace(/\./g, '_'); + const nodeName = nameGen.generate(placeholderName); + tensorIdToName.set(param.id, nodeName); + paramTensorIds.add(param.id); + + graph.push({ + op: 'placeholder', + name: nodeName, + target: nodeName, + args: [], + val_shape: param.shape, + }); + + inputSpecs.push({ + kind: 'PARAMETER', + name: nodeName, + target: paramPath, + }); + } + + // 2. Create placeholder nodes for user inputs + for (let i = 0; i < sampleInputs.length; i++) { + const baseName = 'input'; + const nodeName = nameGen.generate(baseName); + tensorIdToName.set(sampleInputs[i].id, nodeName); + + graph.push({ + op: 'placeholder', + name: nodeName, + target: nodeName, + args: [], + val_shape: sampleInputs[i].shape, + }); + + inputSpecs.push({ + kind: 'USER_INPUT', + name: nodeName, + }); + } + + // 3. 
Trace the forward pass, recording operations + const handler = (e: CustomEvent) => { + const { operation, args, result } = e.detail as { + operation: TorchFunction; + args: (Tensor | number | number[] | boolean)[]; + result: Tensor; + }; + + const opName = operation.opName; + if (!opName) return; // Skip if no opName (shouldn't happen) + + // Build arg references + const nodeArgs: (string | string[])[] = []; + for (const arg of args) { + if (arg instanceof Tensor) { + const name = tensorIdToName.get(arg.id); + if (name) { + nodeArgs.push(name); + } + // If not found, it's an intermediate constant — skip + } + // Numbers and arrays are non-tensor args; we don't include them + // in the graph node args to match PyTorch's behavior for simple cases + } + + // Generate node name from opName + const nodeName = nameGen.generate(opName); + tensorIdToName.set(result.id, nodeName); + + graph.push({ + op: 'call_function', + name: nodeName, + target: toAtenTarget(opName), + args: nodeArgs, + val_shape: result.shape, + }); + }; + + eventBus.addEventListener( + events.OPERATION_AFTER_FORWARD, + handler as EventListener + ); + + let output: Tensor; + try { + output = no_grad(() => module.forward(...sampleInputs)); + } finally { + eventBus.removeEventListener( + events.OPERATION_AFTER_FORWARD, + handler as EventListener + ); + } + + // 4. Add output node + const outputName = tensorIdToName.get(output.id) || 'output'; + graph.push({ + op: 'output', + name: 'output', + target: 'output', + args: [outputName], + }); + + const outputSpecs: OutputSpec[] = [{ + kind: 'USER_OUTPUT', + name: outputName, + }]; + + // 5. 
Collect parameters + const parameters = new Map(); + for (const [paramPath, param] of namedParams) { + parameters.set(paramPath, { + data: [...param.data], + shape: [...param.shape], + }); + } + + return new ExportedProgram( + graph, + { input_specs: inputSpecs, output_specs: outputSpecs }, + parameters + ); +} diff --git a/src/functions/base.ts b/src/functions/base.ts new file mode 100644 index 00000000..897d1741 --- /dev/null +++ b/src/functions/base.ts @@ -0,0 +1,125 @@ +import { _unbroadcast } from '../broadcasting'; +import { zeros_like } from '../creation'; +import { is_grad_enabled } from '../grad_mode'; +import { Tensor } from '../tensor'; +import { eventBus, getNextId, events } from '../util'; + +export type ArgumentType = Tensor | Tensor[] | number | number[] | boolean | string; + +export function resultRequiresGrad(...args: ArgumentType[]): boolean { + if (!is_grad_enabled()) return false; + for (const arg of args) { + if (arg instanceof Tensor && arg.requires_grad) { + return true; + } + } + return false; +} + +abstract class TorchFunction { + public id: number = getNextId(); + public opName: string = ''; + public next_functions: TorchFunction[] = []; + public saved_tensors: Tensor[] = []; + public _retained_tensors: Tensor[] = []; + + protected abstract _forward(...args: ArgumentType[]): Tensor; + protected abstract _backward(dz: Tensor | number): void; + + forward(...args: ArgumentType[]): Tensor { + const requires_grad = resultRequiresGrad(...args); + eventBus.dispatchEvent( + new CustomEvent(events.OPERATION_BEFORE_FORWARD, { + detail: { + operation: this, + requires_grad, + args + } + }) + ); + const result = this._forward(...args); + eventBus.dispatchEvent( + new CustomEvent(events.OPERATION_AFTER_FORWARD, { + detail: { + operation: this, + requires_grad, + args, + result + } + }) + ); + return result; + } + + backward(dz: Tensor | number): void { + eventBus.dispatchEvent( + new CustomEvent(events.OPERATION_BEFORE_BACKWARD, { detail: { operation: 
this, dz } }) + ); + for (const x of this._retained_tensors) { + if (!x.grad) { + x.grad = new Tensor(new Array(x.dataLength()).fill(0)); + } + x.grad = x.grad.add(dz); + } + this._backward(dz); + eventBus.dispatchEvent( + new CustomEvent(events.OPERATION_AFTER_BACKWARD, { detail: { operation: this, dz } }) + ); + } +} + +class NullOp extends TorchFunction { + protected _forward(..._args: ArgumentType[]): Tensor { + throw new Error('NullOp should not be called'); + } + protected _backward(_dz: Tensor): void { + return; + } +} + +export const nullOp = new NullOp(); + +abstract class UnaryFunction extends TorchFunction { + protected abstract _forward(a: Tensor): Tensor; + protected abstract _backward(dz: Tensor): void; +} + +abstract class BinaryFunction extends TorchFunction { + protected abstract _forward(a: Tensor, b: Tensor): Tensor; + protected abstract _backward(dz: Tensor): void; +} + +export type TorchFunctionConstructor = new () => TorchFunction; +export type UnaryFunctionConstructor = new () => UnaryFunction; +export type BinaryFunctionConstructor = new () => BinaryFunction; + +export { TorchFunction, UnaryFunction, BinaryFunction }; + +export class AccumulateGrad extends UnaryFunction { + public variable: Tensor; + + protected _forward(variable: Tensor): Tensor { + this.variable = variable; + return variable; + } + + protected _backward(dz: Tensor | number): void { + if (!this.variable.grad) { + this.variable.grad = zeros_like(this.variable); + } + eventBus.dispatchEvent( + new CustomEvent(events.OPERATION_BEFORE_ACCUMULATE_GRAD, { detail: { operation: this, dz } }) + ); + if (typeof dz === 'number') { + this.variable.grad = this.variable.grad.add(dz); + } else { + const unbroadcasted_dz = _unbroadcast(dz.shape, this.variable.shape, dz.data); + this.variable.grad = this.variable.grad.add( + new Tensor(unbroadcasted_dz, {}, { shape: this.variable.shape }) + ); + } + eventBus.dispatchEvent( + new CustomEvent(events.OPERATION_AFTER_ACCUMULATE_GRAD, { detail: 
{ operation: this, dz } }) + ); + } +} diff --git a/src/functions/functional.ts b/src/functions/functional.ts new file mode 100644 index 00000000..305963c2 --- /dev/null +++ b/src/functions/functional.ts @@ -0,0 +1,315 @@ +import { Tensor } from '../tensor'; +import { createOperation } from './registry'; +import { ArgumentType } from './base'; + +function generate_function(opname: string) { + return (...args: ArgumentType[]) => { + const operation = createOperation(opname); + return operation.forward(...args); + }; +} + +function generate_unary_function(opname: string) { + return (a: Tensor | number) => { + if (typeof a == 'number') { + a = new Tensor(a); + } + + const operation = createOperation(opname); + return operation.forward(a); + }; +} + +function generate_binary_function(opname: string) { + return (a: Tensor | number, b: Tensor | number) => { + if (typeof a == 'number') { + a = new Tensor(a); + } + + if (typeof b == 'number') { + b = new Tensor(b); + } + + const operation = createOperation(opname); + return operation.forward(a, b); + }; +} + +// debug operations + +/** + * @ignore + * Get left index in a binary function + */ +export const __left_index__ = generate_binary_function('__left_index__'); + +/** + * @ignore + * Get right index in a binary function + */ +export const __right_index__ = generate_binary_function('__right_index__'); + +// binary pointwise + +/** + * Adds two tensors element-wise. + */ +export const add = generate_binary_function('add'); + +/** + * Subtracts the second tensor from the first tensor element-wise. + */ +export const sub = generate_binary_function('sub'); + +/** + * Multiplies two tensors element-wise. + */ +export const mul = generate_binary_function('mul'); + +/** + * Divides the first tensor by the second tensor element-wise. + */ +export const div = generate_binary_function('div'); + +/** + * Raises the first tensor to the power of the second tensor element-wise. 
+ */ +export const pow = generate_binary_function('pow'); + +/** + * Computes the element-wise remainder of the division of the first tensor by the second tensor. + */ +export const fmod = generate_binary_function('fmod'); + +/** + * Returns the element-wise maximum of the two tensors. + */ +export const maximum = generate_binary_function('maximum'); + +/** + * Returns the element-wise minimum of the two tensors. + */ +export const minimum = generate_binary_function('minimum'); + +// unary pointwise + +/** + * Computes the natural logarithm of the input tensor element-wise. + */ +export const log = generate_unary_function('log'); + +/** + * Computes the square root of the input tensor element-wise. + */ +export const sqrt = generate_unary_function('sqrt'); + +/** + * Computes the exponential of the input tensor element-wise. + */ +export const exp = generate_unary_function('exp'); + +/** + * Computes the square of the input tensor element-wise. + */ +export const square = generate_unary_function('square'); + +/** + * Computes the absolute value of the input tensor element-wise. + */ +export const abs = generate_unary_function('abs'); + +/** + * Computes the sign of the input tensor element-wise. + */ +export const sign = generate_unary_function('sign'); + +/** + * Negates the input tensor element-wise. + */ +export const neg = generate_unary_function('neg'); + +/** + * Computes the reciprocal of the input tensor element-wise. + */ +export const reciprocal = generate_unary_function('reciprocal'); + +/** + * Replaces NaN values in the input tensor with 0, positive infinity with a large finite number, and negative infinity with a small finite number. + */ +export const nan_to_num = generate_unary_function('nan_to_num'); + +/** + * Reshapes the input tensor to the given shape. + */ +export const reshape = generate_function('reshape'); + +/** + * Removes all dimensions of size 1 from the input tensor. 
+ */ +export const squeeze = generate_function('squeeze'); + +/** + * Adds a dimension of size 1 to the input tensor at the given position. + */ +export const unsqueeze = generate_function('unsqueeze'); + +/** + * Expands the input tensor to the given shape. + */ +export const expand = generate_function('expand'); + +// trigonometric + +/** + * Computes the sine of the input tensor element-wise. + */ +export const sin = generate_unary_function('sin'); + +/** + * Computes the cosine of the input tensor element-wise. + */ +export const cos = generate_unary_function('cos'); + +/** + * Computes the tangent of the input tensor element-wise. + */ +export const tan = generate_unary_function('tan'); + +// reduction + +/** + * Computes the sum of the elements of the input tensor. + */ +export const sum = generate_function('sum'); + +/** + * Computes the mean of the elements of the input tensor. + */ +export const mean = generate_function('mean'); + +/** + * Computes the minimum of the elements of the input tensor. + */ +export const min = generate_function('min'); + +/** + * Computes the maximum of the elements of the input tensor. + */ +export const max = generate_function('max'); + +// linalg + +/** + * Transposes the input tensor. + */ +export const transpose = generate_function('transpose'); + +/** + * Computes the matrix product of the two input tensors. + */ +export const matmul = generate_binary_function('matmul'); + +// comparison + +/** + * Checks if the first tensor is less than the second tensor element-wise. + */ +export const lt = generate_binary_function('lt'); + +/** + * Checks if the first tensor is greater than the second tensor element-wise. + */ +export const gt = generate_binary_function('gt'); + +/** + * Checks if the first tensor is less than or equal to the second tensor element-wise. + */ +export const le = generate_binary_function('le'); + +/** + * Checks if the first tensor is greater than or equal to the second tensor element-wise. 
+ */ +export const ge = generate_binary_function('ge'); + +/** + * Checks if the first tensor is equal to the second tensor element-wise. + */ +export const eq = generate_binary_function('eq'); + +/** + * Checks if the first tensor is not equal to the second tensor element-wise. + */ +export const ne = generate_binary_function('ne'); + +/** + * Checks if the two tensors are equal element-wise within a given tolerance. + */ +export function allclose( + a: Tensor, + b: Tensor, + rtol: number = 1e-5, + atol: number = 1e-8, + equal_nan: boolean = false +): boolean { + return a.allclose(b, rtol, atol, equal_nan); +} + +/** + * Returns the number of elements in the input tensor. + */ +export function numel(a: Tensor): number { + return a.dataLength(); +} + +/** + * Flattens the input tensor. + */ +export function flatten(input: Tensor, start_dim: number = 0, end_dim: number = -1): Tensor { + return input.flatten(start_dim, end_dim); +} + +/** + * Concatenates tensors along a given dimension. + */ +export function cat(tensors: Tensor[], dim: number = 0): Tensor { + const operation = createOperation('cat'); + return operation.forward(tensors, dim); +} + +/** + * Alias for {@link cat}. + */ +export const concatenate = cat; + +/** + * Alias for {@link cat}. + */ +export const concat = cat; + +/** + * Computes the softmax of the input tensor along the given dimension. + */ +export function softmax(input: Tensor, dim: number): Tensor { + const operation = createOperation('softmax'); + return operation.forward(input, dim); +} + +/** + * Clamps all elements in input tensor to the range [min, max]. + */ +export function clamp(input: Tensor, min: number, max: number): Tensor { + const operation = createOperation('clamp'); + return operation.forward(input, min, max); +} + +/** + * Alias for {@link clamp}. + */ +export const clip = clamp; + +/** + * Stack tensors along a new dimension. 
+ */ +export function stack(tensors: Tensor[], dim: number = 0): Tensor { + return cat(tensors.map(t => t.unsqueeze(dim)), dim); +} diff --git a/src/functions/mixin.ts b/src/functions/mixin.ts new file mode 100644 index 00000000..da326fb6 --- /dev/null +++ b/src/functions/mixin.ts @@ -0,0 +1,229 @@ +import { Tensor } from '../tensor'; +import { + _broadcast_shape, + _get_original_index, + _pad_shape +} from '../broadcasting'; +import { TorchFunction, BinaryFunction, UnaryFunction, nullOp, resultRequiresGrad } from './base'; +import { registerOperation } from './registry'; +import { _get_reduction_shape, _get_strides, _ravel_index, _unravel_index } from './util'; + +export function BinaryFunctionMixin( + operation: (a: number[], b: number[], a_index: number, b_index: number) => number, + backward_operations: (a?: Tensor, b?: Tensor, aFn?: TorchFunction, bFn?: TorchFunction, dz?: Tensor) => void, + opName: string | null = null +): typeof BinaryFunction { + const kernel = ( + a: number[], + as: number[], + b: number[], + bs: number[], + bcs: number[], + output_size: number + ) => { + const res = Array(output_size); + for (let x = 0; x < output_size; x++) { + const a_index = _get_original_index(as, bcs, x); + const b_index = _get_original_index(bs, bcs, x); + res[x] = operation(a, b, a_index, b_index); + } + return res; + }; + + const forward_tensor = (a: Tensor, b: Tensor, operation: TorchFunction | null = null): Tensor => { + const broadcast_shape = _broadcast_shape(a.shape, b.shape); + const padded_a_shape = _pad_shape(a.shape, broadcast_shape); + const padded_b_shape = _pad_shape(b.shape, broadcast_shape); + + const output_size = broadcast_shape.reduce((acc, val) => acc * val, 1); + + return new Tensor( + kernel( + a.data, + padded_a_shape, + b.data, + padded_b_shape, + broadcast_shape, + output_size + ) as number[], + { requires_grad: resultRequiresGrad(a, b) }, + { operation: operation, shape: broadcast_shape } + ); + }; + + const result = { + [opName]: class 
extends BinaryFunction { + protected _forward(a: Tensor, b: Tensor): Tensor { + const rg = resultRequiresGrad(a, b); + if (rg) { + this.saved_tensors = [a, b]; + } + this.next_functions.push(a.grad_fn ? a.grad_fn : nullOp); + this.next_functions.push(b.grad_fn ? b.grad_fn : nullOp); + return forward_tensor(a, b, rg ? this : null); + } + + protected _backward(dz: Tensor): void { + const [a, b] = this.saved_tensors; + const [aFn, bFn] = this.next_functions; + + backward_operations(a, b, aFn, bFn, dz); + } + } + }[opName]; + if (opName) { + registerOperation(opName, result); + } + return result; +} + +export function UnaryFunctionMixin( + operation: (a: number[], x: number) => number, + backward_operations: (a?: Tensor, aFn?: TorchFunction, dz?: Tensor) => void, + opName: string | null = null +): typeof UnaryFunction { + const kernel = (a: number[], output_size: number) => { + const res = Array(output_size); + for (let x = 0; x < output_size; x++) { + res[x] = operation(a, x); + } + return res; + }; + const forward_tensor = (a: Tensor, operation: TorchFunction | null = null): Tensor => { + const output_size = a.dataLength(); + + return new Tensor( + kernel(a.data, output_size) as number[], + { requires_grad: resultRequiresGrad(a) }, + { operation: operation, shape: a.shape } + ); + }; + + const result = { + [opName]: class extends UnaryFunction { + protected _forward(a: Tensor): Tensor { + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + this.next_functions.push(a.grad_fn ? a.grad_fn : nullOp); + return forward_tensor(a, rg ? 
this : null); + } + + protected _backward(dz: Tensor): void { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + backward_operations(a, aFn, dz); + } + } + }[opName]; + if (opName) { + registerOperation(opName, result); + } + return result; +} + +export function ReductionFunctionMixin( + init_val: number, + reduce_op: (acc: number, val: number) => number, + backward_operations: ( + a: Tensor, + restored_dz: Tensor, + dim: number | number[], + keepdim: boolean + ) => Tensor, + opName: string | null = null, + finalize_op?: (acc: number, count: number) => number +): new () => TorchFunction { + const result = { + [opName]: class extends TorchFunction { + protected dim?: number | number[]; + protected keepdim?: boolean; + + protected _forward(a: Tensor, dim?: number | number[], keepdim: boolean = false): Tensor { + this.dim = dim; + this.keepdim = keepdim; + + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + this.next_functions.push(a.grad_fn ? a.grad_fn : nullOp); + + const out_shape = _get_reduction_shape(a.shape, dim, keepdim); + const out_size = out_shape.reduce((acc, val) => acc * val, 1); + + const res_data = new Array(out_size).fill(init_val); + const counts = new Array(out_size).fill(0); // Tracked specifically for mean() + + const in_strides = _get_strides(a.shape); + const out_strides = _get_strides(out_shape); + + const dims = dim === undefined ? [] : Array.isArray(dim) ? dim : [dim]; + const normalized_dims = dims.map(d => (d < 0 ? d + a.shape.length : d)); + const is_full_reduce = dim === undefined; + + // Accumulate + const aData = a.data; // cache: avoids repeated allocations for view tensors + for (let i = 0; i < aData.length; i++) { + const in_coords = _unravel_index(i, in_strides); + let out_coords: number[]; + + if (is_full_reduce) { + out_coords = keepdim ? 
in_coords.map(() => 0) : []; + } else { + out_coords = []; + for (let j = 0; j < a.shape.length; j++) { + if (normalized_dims.includes(j)) { + if (keepdim) out_coords.push(0); // Collapse dimension to index 0 + } else { + out_coords.push(in_coords[j]); + } + } + } + + const out_idx = _ravel_index(out_coords, out_strides); + res_data[out_idx] = reduce_op(res_data[out_idx], aData[i]); + counts[out_idx]++; + } + + // Finalize (e.g., divide by count for mean) + if (finalize_op) { + for (let i = 0; i < out_size; i++) { + res_data[i] = finalize_op(res_data[i], counts[i]); + } + } + + return new Tensor( + res_data, + { requires_grad: rg }, + { operation: rg ? this : null, shape: out_shape } + ); + } + + protected _backward(dz: Tensor): void { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + let restored_dz = dz; + + const target_shape = _get_reduction_shape(a.shape, this.dim, true); + + if (dz.shape.length !== target_shape.length) { + restored_dz = dz.reshape(target_shape); + } + + const expanded_dz = restored_dz.expand(a.shape); + const grad_a = backward_operations(a, expanded_dz, this.dim, this.keepdim); + + aFn.backward(grad_a); + } + } + }[opName]; + + if (opName) { + registerOperation(opName, result); + } + return result; +} diff --git a/src/functions/ops.ts b/src/functions/ops.ts new file mode 100644 index 00000000..982d6cc7 --- /dev/null +++ b/src/functions/ops.ts @@ -0,0 +1,1414 @@ +import { Tensor } from '../tensor'; +import { + _broadcast_shape, + _get_original_index, + _pad_shape, + _unbroadcast +} from '../broadcasting'; +import { TorchFunction, BinaryFunction, nullOp, resultRequiresGrad } from './base'; +import * as functional from './functional'; +import { registerOperation } from './registry'; +import { ones } from '../creation'; +import { UnaryFunctionMixin, BinaryFunctionMixin, ReductionFunctionMixin } from './mixin'; +import { _get_reduction_shape } from './util'; + +function unbroadcast(result: Tensor, original_shape: 
number[]): Tensor {
  const unbroadcasted_result = _unbroadcast(result.shape, original_shape, result.data);
  return new Tensor(unbroadcasted_result, { requires_grad: result.requires_grad }, { shape: original_shape });
}

// Broadcast `tensor` up to `result_shape` by multiplying with a ones tensor of
// the target shape — the element-wise mul kernel performs the actual expansion.
function broadcast(tensor: Tensor, result_shape: number[]): Tensor {
  return tensor.mul(ones(result_shape));
}

// debug operations

// Debug op: for every output element, report the flat index read from the LEFT
// operand after broadcasting. Backward is a no-op (no gradient flows).
const __Left_index__ = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, _b_index: number) => a_index,
  () => { },
  "__left_index__"
);

// Debug op: same as above, but reports the flat index read from the RIGHT operand.
const __Right_index__ = BinaryFunctionMixin(
  (a: number[], b: number[], _a_index: number, b_index: number) => b_index,
  () => { },
  "__right_index__"
);

// binary pointwise

// z = a + b. dz/da = dz/db = 1, so the upstream gradient passes through
// unchanged to both inputs (shape reduction back to each operand's shape is
// handled by AccumulateGrad's _unbroadcast — see base.ts).
const Add = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => a[a_index] + b[b_index],
  (_a, _b, aFn, bFn, dz) => {
    aFn.backward(dz);
    bFn.backward(dz);
  },
  "add"
);

// z = a - b. da = dz, db = -dz.
const Sub = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => a[a_index] - b[b_index],
  (_a, _b, aFn, bFn, dz) => {
    aFn.backward(dz);
    bFn.backward(dz.mul(new Tensor(-1)));
  },
  "sub"
);

// z = a * b. da = dz * b, db = dz * a.
const Mul = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => a[a_index] * b[b_index],
  (a, b, aFn, bFn, dz) => {
    aFn.backward(dz.mul(b));
    bFn.backward(dz.mul(a));
  },
  "mul"
);

// z = a / b. da = dz / b, db = -dz * a / b^2.
const Div = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => a[a_index] / b[b_index],
  (a, b, aFn, bFn, dz) => {
    aFn.backward(dz.div(b));
    bFn.backward(dz.mul(a).mul(new Tensor(-1)).div(b).div(b));
  },
  "div"
);

// Element-wise select helper for masking gradients:
//   out[i] = mask[i] ? x[i] : fallback (scalar) or fallback[i] (tensor).
// The result carries no grad history ({} options).
// NOTE(review): iterates flat data directly — assumes mask, x, and a tensor
// fallback all share the same flat length (no broadcasting here); confirm
// callers uphold this.
function _where(mask: Tensor, x: Tensor, fallback: Tensor | number): Tensor {
  const fb = typeof fallback === 'number' ? fallback : null;
  const maskData = mask.data;
  const xData = x.data;
  const fbData = fb === null ? 
(fallback as Tensor).data : null;
  const data = new Array(x.dataLength());
  for (let i = 0; i < data.length; i++) {
    data[i] = maskData[i] ? xData[i] : (fb !== null ? fb : fbData![i]);
  }
  return new Tensor(data, {}, { shape: x.shape });
}

// z = a^b (tensor exponent). da = dz * b * a^(b-1), db = dz * a^b * ln(a).
const Pow = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => Math.pow(a[a_index], b[b_index]),
  (a, b, aFn, bFn, dz) => {
    const ga = dz.mul(b).mul(a.pow(b.sub(new Tensor(1))));
    const gb = dz.mul(a.pow(b)).mul(a.log());
    // When a==0, grads can produce NaN/Inf (from 0*Inf or log(0)); replace with 0
    aFn.backward(_where(a.ne(0), ga, ga.nan_to_num()));
    bFn.backward(_where(a.ne(0), gb, 0));
  },
  "pow"
);

// z = a % b (JS remainder: result takes the sign of a, matching torch.fmod).
// da = dz.
// NOTE(review): no gradient is ever propagated to the divisor (bFn is unused);
// PyTorch defines db = -trunc(a/b) * dz — confirm this omission is intentional.
const Fmod = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => a[a_index] % b[b_index],
  (_a, _b, aFn, _bFn, dz) => {
    aFn.backward(dz);
  },
  "fmod"
);

// z = max(a, b): the strictly larger side receives dz; ties split it evenly.
const Maximum = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => Math.max(a[a_index], b[b_index]),
  (a, b, aFn, bFn, dz) => {
    // When a == b, PyTorch splits gradient 0.5 each
    const eq_mask = a.eq(b);
    const a_mask = a.gt(b).add(eq_mask.mul(new Tensor(0.5)));
    const b_mask = b.gt(a).add(eq_mask.mul(new Tensor(0.5)));
    aFn.backward(dz.mul(a_mask));
    bFn.backward(dz.mul(b_mask));
  },
  "maximum"
);

// z = min(a, b): mirror image of Maximum (lt instead of gt).
const Minimum = BinaryFunctionMixin(
  (a: number[], b: number[], a_index: number, b_index: number) => Math.min(a[a_index], b[b_index]),
  (a, b, aFn, bFn, dz) => {
    // When a == b, PyTorch splits gradient 0.5 each
    const eq_mask = a.eq(b);
    const a_mask = a.lt(b).add(eq_mask.mul(new Tensor(0.5)));
    const b_mask = b.lt(a).add(eq_mask.mul(new Tensor(0.5)));
    aFn.backward(dz.mul(a_mask));
    bFn.backward(dz.mul(b_mask));
  },
  "minimum"
);

// Element-wise a[i]^n for a constant numeric exponent; kernel for PowInt below.
// Attaches `operation` as grad_fn when a gradient is required.
function _powint_tensor(a: Tensor, n: number, operation: TorchFunction | null = null): Tensor {
  const aData = a.data;
  const data = new Array(a.dataLength());
  for (let i = 0; i < 
data.length; i++) { + data[i] = Math.pow(aData[i], n); + } + return new Tensor( + data, + { requires_grad: resultRequiresGrad(a) }, + { operation: operation, shape: a.shape } + ); +} + +class PowInt extends TorchFunction { + private n: number; + protected _forward(a: Tensor, n: number): Tensor { + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + this.n = n; + } + + this.next_functions.push(a.grad_fn ? a.grad_fn : nullOp); + return _powint_tensor(a, n, rg ? this : null); + } + protected _backward(dz: Tensor): void { + const [a] = this.saved_tensors; + const n = this.n; + const [aFn] = this.next_functions; + + // backward_operations: + aFn.backward(dz.mul(n).mul(a.pow(n - 1))); + } +} +registerOperation("powint", PowInt); + +// unary pointwise + +const Log = UnaryFunctionMixin( + (a: number[], a_index: number) => Math.log(a[a_index]), + (a, aFn, dz) => { + aFn.backward(dz.mul(new Tensor(1).div(a))); + }, + "log" +); + +const Sqrt = UnaryFunctionMixin( + (a: number[], x: number) => Math.sqrt(a[x]), + (a, aFn, dz) => { + aFn.backward(dz.mul(new Tensor(1).div(a.sqrt()).div(2))); + }, + "sqrt" +); + +const Exp = UnaryFunctionMixin( + (a: number[], x: number) => Math.exp(a[x]), + (a, aFn, dz) => { + aFn.backward(dz.mul(a.exp())); + }, + "exp" +); + +const Square = UnaryFunctionMixin( + (a: number[], x: number) => a[x] * a[x], + (a, aFn, dz) => { + aFn.backward(dz.mul(a).mul(new Tensor(2))); + }, + "square" +); + +const Abs = UnaryFunctionMixin( + (a: number[], x: number) => Math.abs(a[x]), + (a, aFn, dz) => { + aFn.backward(dz.mul(functional.sign(a))); + }, + "abs" +); + +const Sign = UnaryFunctionMixin( + (a: number[], x: number) => Math.sign(a[x]), + (_a, aFn) => { + aFn.backward(0); + }, + "sign" +); + +const Neg = UnaryFunctionMixin( + (a: number[], x: number) => -a[x], + (_a, aFn, dz) => { + aFn.backward(dz.mul(new Tensor(-1))); + }, + "neg" +); + +const Reciprocal = UnaryFunctionMixin( + (a: number[], x: number) => 1 / a[x], + (a, aFn, dz) 
=> { + aFn.backward(dz.mul(a.pow(-2)).neg()); + }, + "reciprocal" +); + +const NanToNum = UnaryFunctionMixin( + (a: number[], x: number) => { + const v = a[x]; + if (Number.isNaN(v)) return 0; + if (v === Infinity) return 3.4028235e+38; + if (v === -Infinity) return -3.4028235e+38; + return v; + }, + (a, aFn, dz) => { + aFn.backward(dz); + }, + "nan_to_num" +); + +class Reshape extends TorchFunction { + protected _forward(a: Tensor, shape: number[]) { + const previous_length = a.dataLength(); + + const negIdx = shape.indexOf(-1); + if (negIdx !== -1) { + if (shape.indexOf(-1, negIdx + 1) !== -1) { + throw new Error('Only one -1 is allowed in reshape shape'); + } + const known = shape.reduce((acc, val, i) => i === negIdx ? acc : acc * val, 1); + if (previous_length % known !== 0) { + throw new Error('Shape mismatch: cannot infer -1 dimension for shape ' + a.shape + ' -> ' + shape); + } + shape = shape.slice(); + shape[negIdx] = previous_length / known; + } + + const target_length = shape.reduce((acc, val) => acc * val, 1); + + if (previous_length !== target_length) { + throw new Error('Shape mismatch: ' + a.shape + ' and ' + shape); + } + + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + if (a.grad_fn) { + this.next_functions.push(a.grad_fn); + } else { + this.next_functions.push(nullOp); + } + + return new Tensor( + a.data, + { requires_grad: rg }, + { operation: rg ? this : null, shape } + ); + } + protected _backward(dz: Tensor) { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + aFn.backward(dz.reshape(a.shape)); + } +} +registerOperation('reshape', Reshape); + +class Flatten extends TorchFunction { + protected _forward(a: Tensor, start_dim: number = 0, end_dim: number = -1) { + const ndim = a.shape.length; + const sd = start_dim < 0 ? start_dim + ndim : start_dim; + const ed = end_dim < 0 ? 
end_dim + ndim : end_dim; + const newShape = [ + ...a.shape.slice(0, sd), + a.shape.slice(sd, ed + 1).reduce((acc, val) => acc * val, 1), + ...a.shape.slice(ed + 1), + ]; + + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + if (a.grad_fn) { + this.next_functions.push(a.grad_fn); + } else { + this.next_functions.push(nullOp); + } + + return new Tensor( + a.data, + { requires_grad: rg }, + { operation: rg ? this : null, shape: newShape } + ); + } + protected _backward(dz: Tensor) { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + aFn.backward(dz.reshape(a.shape)); + } +} +registerOperation('flatten', Flatten); + +class Squeeze extends TorchFunction { + protected _forward(a: Tensor, dim?: number) { + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + if (a.grad_fn) { + this.next_functions.push(a.grad_fn); + } else { + this.next_functions.push(nullOp); + } + + let shape = [...a.shape]; + + if (dim !== undefined) { + if (dim < 0) { + dim += a.shape.length; + } + + // PyTorch only squeezes the specified dimension if its size is exactly 1 + if (shape[dim] === 1) { + shape.splice(dim, 1); + } + } else { + // If no dim is provided, strip out all dimensions of size 1 + shape = shape.filter((d) => d !== 1); + } + + return new Tensor( + a.data, + { requires_grad: rg }, + { operation: rg ? this : null, shape } + ); + } + + protected _backward(dz: Tensor) { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + // The derivative of squeeze is just reshaping the gradient + // back to the original unsqueezed shape. 
+ aFn.backward(dz.reshape(a.shape)); + } +} +registerOperation('squeeze', Squeeze); + +class Unsqueeze extends TorchFunction { + protected _forward(a: Tensor, dim: number) { + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + if (a.grad_fn) { + this.next_functions.push(a.grad_fn); + } else { + this.next_functions.push(nullOp); + } + + if (dim < 0) { + dim += a.shape.length + 1; + } + + const shape = [...a.shape]; + shape.splice(dim, 0, 1); + + return new Tensor( + a.data, + { requires_grad: rg }, + { operation: rg ? this : null, shape } + ); + } + protected _backward(dz: Tensor) { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + // backward_operations: + aFn.backward(dz.reshape(a.shape)); + } +} +registerOperation('unsqueeze', Unsqueeze); + +class Expand extends TorchFunction { + protected _forward(a: Tensor, expanded_shape: number[]): Tensor { + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + } + if (a.grad_fn) { + this.next_functions.push(a.grad_fn); + } else { + this.next_functions.push(nullOp); + } + + const offset = expanded_shape.length - a.shape.length; + const target_shape = expanded_shape.map((dim, i) => { + if (dim === -1) { + const orig_i = i - offset; + return orig_i >= 0 ? a.shape[orig_i] : 1; + } + return dim; + }); + + // Steal data from just broadcasting + const outData = broadcast(a, target_shape).data; + + return new Tensor( + outData, + { requires_grad: rg }, + { operation: rg ? 
this : null, shape: target_shape } + ); + } + + protected _backward(dz: Tensor): void { + const [a] = this.saved_tensors; + const [aFn] = this.next_functions; + + // Route the collapsed gradient upstream + aFn.backward(unbroadcast(dz, a.shape)); + } +} +registerOperation('expand', Expand) + +// trigonometric + +const Sin = UnaryFunctionMixin( + (a: number[], x: number) => Math.sin(a[x]), + (a, aFn, dz) => { + aFn.backward(dz.mul(a.cos())); + }, + "sin" +); + +const Cos = UnaryFunctionMixin( + (a: number[], x: number) => Math.cos(a[x]), + (a, aFn, dz) => { + aFn.backward(dz.mul(a.sin().neg())); + }, + "cos" +); + +const Tan = UnaryFunctionMixin( + (a: number[], x: number) => Math.tan(a[x]), + (a, aFn, dz) => { + aFn.backward(dz.mul(a.cos().pow(-2))); + }, + "tan" +); + +// reduction + +export const Sum = ReductionFunctionMixin( + 0, + (acc, val) => acc + val, + (a, expanded_dz) => expanded_dz, + 'sum' +); + +export const Mean = ReductionFunctionMixin( + 0, + (acc, val) => acc + val, + (a, expanded_dz, dim) => { + const target_shape = _get_reduction_shape(a.shape, dim, false); + const out_size = target_shape.length > 0 ? 
target_shape.reduce((acc, v) => acc * v, 1) : 1; + const N = a.dataLength() / out_size; + + return expanded_dz.mul(new Tensor([1 / N])); + }, + 'mean', + (acc, count) => acc / count +); + +export const Max = ReductionFunctionMixin( + -Infinity, + (acc, val) => Math.max(acc, val), + (a, expanded_dz, dim) => { + const max_tensor = a.max(dim, true); + const max_expanded = max_tensor.expand(a.shape); + const mask = a.eq(max_expanded).detach(); + + return expanded_dz.mul(mask); + }, + 'max' +); + +export const Min = ReductionFunctionMixin( + Infinity, + (acc, val) => Math.min(acc, val), + (a, expanded_dz, dim) => { + const min_tensor = a.min(dim, true); + const min_expanded = min_tensor.expand(a.shape); + const mask = a.eq(min_expanded).detach(); + + return expanded_dz.mul(mask); + }, + 'min' +); + +// linalg + +function _transpose_tensor( + a: Tensor, + dim0: number, + dim1: number, + operation: TorchFunction | null = null +): Tensor { + if (a.shape.length + dim0 < 0 || a.shape.length + dim1 < 0) { + throw new Error(`Transpose: Dimension out of range (${dim0} and ${dim1})`); + } + dim0 = dim0 < 0 ? a.shape.length + dim0 : dim0; + dim1 = dim1 < 0 ? 
a.shape.length + dim1 : dim1; + + const output_shape = [...a.shape]; + [output_shape[dim0], output_shape[dim1]] = [output_shape[dim1], output_shape[dim0]]; + const size = a.dataLength(); + const data = new Array(size); + const aData = a.data; + + const a_strides = new Array(a.shape.length); + const out_strides = new Array(output_shape.length); + for (let i = a.shape.length - 1, s = 1; i >= 0; i--) { + a_strides[i] = s; + s *= a.shape[i]; + } + for (let i = output_shape.length - 1, s = 1; i >= 0; i--) { + out_strides[i] = s; + s *= output_shape[i]; + } + + for (let i = 0; i < size; i++) { + let idx = i; + let input_idx = 0; + for (let d = 0; d < output_shape.length; d++) { + const stride = out_strides[d]; + const coord = Math.floor(idx / stride); + idx %= stride; + + let input_d = d; + if (d === dim0) input_d = dim1; + else if (d === dim1) input_d = dim0; + + input_idx += coord * a_strides[input_d]; + } + data[i] = aData[input_idx]; + } + + return new Tensor( + data, + { requires_grad: resultRequiresGrad(a) }, + { operation: operation, shape: output_shape } + ); +} +class Transpose extends TorchFunction { + private dim0: number; + private dim1: number; + protected _forward(a: Tensor, dim0: number, dim1: number): Tensor { + const rg = resultRequiresGrad(a); + if (rg) { + this.saved_tensors = [a]; + this.dim0 = dim0; + this.dim1 = dim1; + } + this.next_functions.push(a.grad_fn ? a.grad_fn : nullOp); + return _transpose_tensor(a, dim0, dim1, rg ? 
this : null); + } + protected _backward(dz: Tensor): void { + // const [a] = this.saved_tensors; + const dim0 = this.dim0; + const dim1 = this.dim1; + const [aFn] = this.next_functions; + + // backward_operations: + aFn.backward(dz.transpose(dim0, dim1)); + } +} +registerOperation('transpose', Transpose); + +function _matmul_tensor(a: Tensor, b: Tensor, operation: TorchFunction | null = null): [Tensor, number[]] { + if (a.shape.length == 1 && b.shape.length == 1) { + return [a.mul(b).sum(), []]; + } + + const a_1d = a.shape.length == 1; + const b_1d = b.shape.length == 1; + + const a_shape = a_1d ? [1, a.shape[0]] : a.shape; + const b_shape = b_1d ? [b.shape[0], 1] : b.shape; + + if (a_shape[a_shape.length - 1] != b_shape[b_shape.length - 2]) { + throw new Error(`Shapes cannot be multiplied (${a_shape.join("x")} and ${b_shape.join("x")})`); + } + + const broadcast_shape = _broadcast_shape(a_shape.slice(0, -2), b_shape.slice(0, -2)).concat([ + a_shape[a_shape.length - 2], + b_shape[b_shape.length - 1] + ]); + + const output_size = broadcast_shape.reduce((acc, val) => acc * val, 1); + const data = new Array(output_size).fill(0); + + const padded_a_shape = _pad_shape(a_shape, broadcast_shape); + const padded_b_shape = _pad_shape(b_shape, broadcast_shape); + + const dim_M = broadcast_shape[broadcast_shape.length - 2]; + const dim_N = broadcast_shape[broadcast_shape.length - 1]; + const dim_K = a_shape[a_shape.length - 1]; // or b_shape[b_shape.length - 2] + + const aData = a.data; + const bData = b.data; + for (let i = 0; i < output_size; i++) { + const mn_idx = i % (dim_M * dim_N); + const m = Math.floor(mn_idx / dim_N); + const n = mn_idx % dim_N; + + const base_a = _get_original_index(padded_a_shape, broadcast_shape, i - n); + const base_b = _get_original_index(padded_b_shape, broadcast_shape, i - m * dim_N); + + let sum = 0; + for (let k = 0; k < dim_K; k++) { + sum += aData[base_a + k] * bData[base_b + k * dim_N]; + } + data[i] = sum; + } + + let 
shape_after_removing_extra_dims = [...broadcast_shape]; + + if (a_1d) { + shape_after_removing_extra_dims = shape_after_removing_extra_dims + .slice(0, -2) + .concat([broadcast_shape[broadcast_shape.length - 1]]); + } + + if (b_1d) { + shape_after_removing_extra_dims = shape_after_removing_extra_dims.slice(0, -1); + } + + return [new Tensor( + data, + { requires_grad: resultRequiresGrad(a, b) }, + { operation: operation, shape: shape_after_removing_extra_dims } + ), shape_after_removing_extra_dims]; +} + +class Matmul extends BinaryFunction { + private shape: number[]; + + protected _forward(a: Tensor, b: Tensor): Tensor { + const rg = resultRequiresGrad(a, b); + if (rg) { + this.saved_tensors = [a, b]; + } + this.next_functions.push(a.grad_fn ? a.grad_fn : nullOp); + this.next_functions.push(b.grad_fn ? b.grad_fn : nullOp); + const result = _matmul_tensor(a, b, rg ? this : null); + this.shape = result[1]; + return result[0]; + } + protected _backward(dz: Tensor): void { + const [a, b] = this.saved_tensors; + const [aFn, bFn] = this.next_functions; + + // 1. 1D x 1D (Dot Product) + if (a.shape.length === 1 && b.shape.length === 1) { + aFn.backward(dz.mul(b)); + bFn.backward(dz.mul(a)); + return; + } + + // 2. 1D x ND + if (a.shape.length === 1) { + const dz1 = dz.unsqueeze(-2); + const a1 = a.unsqueeze(-2); + + let da = dz1.matmul(b.transpose(-2, -1)); + let db = a1.transpose(-2, -1).matmul(dz1); + + da = da.squeeze(-2); + db = unbroadcast(db, b.shape); + + aFn.backward(da); + bFn.backward(db); + return; + } + + // 3. ND x 1D + if (b.shape.length === 1) { + const dz1 = dz.unsqueeze(-1); + const b1 = b.unsqueeze(-1); + + let da = dz1.matmul(b1.transpose(-2, -1)); + let db = a.transpose(-2, -1).matmul(dz1); + + da = unbroadcast(da, a.shape); + db = db.squeeze(-1); + + aFn.backward(da); + bFn.backward(db); + return; + } + + // 4. 
ND x ND (Batched or Standard) + let da = dz.matmul(b.transpose(-2, -1)); + let db = a.transpose(-2, -1).matmul(dz); + + da = unbroadcast(da, a.shape); + db = unbroadcast(db, b.shape); + + aFn.backward(da); + bFn.backward(db); + } +} +registerOperation('matmul', Matmul); + +function _convNd_forward( + input: Tensor, + weight: Tensor, + bias: Tensor | null, + stride: number | number[], + padding: number | number[], + dilation: number | number[], + groups: number, + dims: number +): Tensor { + const stride_arr = typeof stride === 'number' ? new Array(dims).fill(stride) : stride; + const padding_arr = typeof padding === 'number' ? new Array(dims).fill(padding) : padding; + const dilation_arr = typeof dilation === 'number' ? new Array(dims).fill(dilation) : dilation; + + const batch_size = input.shape[0]; + const in_channels = input.shape[1]; + const out_channels = weight.shape[0]; + const in_dims = input.shape.slice(2); + const kernel_dims = weight.shape.slice(2); + + if (in_channels !== weight.shape[1] * groups) { + throw new Error(`in_channels (${in_channels}) must be divisible by groups (${groups}) and match weight.shape[1] * groups (${weight.shape[1] * groups})`); + } + + const out_dims = in_dims.map((in_dim, i) => { + return Math.floor((in_dim + 2 * padding_arr[i] - dilation_arr[i] * (kernel_dims[i] - 1) - 1) / stride_arr[i] + 1); + }); + + const output_shape = [batch_size, out_channels, ...out_dims]; + const output_size = output_shape.reduce((a, b) => a * b, 1); + const output_data = new Array(output_size).fill(0); + + const get_strides = (shape: number[]) => { + const strides = new Array(shape.length); + let s = 1; + for (let i = shape.length - 1; i >= 0; i--) { + strides[i] = s; + s *= shape[i]; + } + return strides; + }; + + const in_strides = get_strides(input.shape); + const w_strides = get_strides(weight.shape); + const out_strides = get_strides(output_shape); + const in_channels_per_group = in_channels / groups; + const out_channels_per_group = 
out_channels / groups; + const inputData = input.data; + const weightData = weight.data; + const biasData = bias ? bias.data : null; + + for (let b = 0; b < batch_size; b++) { + for (let g = 0; g < groups; g++) { + for (let oc_g = 0; oc_g < out_channels_per_group; oc_g++) { + const oc = g * out_channels_per_group + oc_g; + + // Iterate over output spatial dimensions + const out_spatial_size = out_dims.reduce((a, b) => a * b, 1); + for (let os_idx = 0; os_idx < out_spatial_size; os_idx++) { + + // Decode output spatial index + const os_coords = new Array(dims); + let temp_os = os_idx; + for (let d = dims - 1; d >= 0; d--) { + os_coords[d] = temp_os % out_dims[d]; + temp_os = Math.floor(temp_os / out_dims[d]); + } + + let sum = biasData ? biasData[oc] : 0; + + // Iterate over kernel spatial dimensions and in_channels + for (let ic_g = 0; ic_g < in_channels_per_group; ic_g++) { + const ic = g * in_channels_per_group + ic_g; + + const kernel_spatial_size = kernel_dims.reduce((a, b) => a * b, 1); + for (let ks_idx = 0; ks_idx < kernel_spatial_size; ks_idx++) { + // Decode kernel spatial index + const ks_coords = new Array(dims); + let temp_ks = ks_idx; + for (let d = dims - 1; d >= 0; d--) { + ks_coords[d] = temp_ks % kernel_dims[d]; + temp_ks = Math.floor(temp_ks / kernel_dims[d]); + } + + // Calculate input spatial coordinates + let is_valid = true; + const is_coords = new Array(dims); + for (let d = 0; d < dims; d++) { + const in_coord = os_coords[d] * stride_arr[d] + ks_coords[d] * dilation_arr[d] - padding_arr[d]; + if (in_coord < 0 || in_coord >= in_dims[d]) { + is_valid = false; + break; + } + is_coords[d] = in_coord; + } + + if (is_valid) { + // Calculate flattened indices + let in_flat_idx = b * in_strides[0] + ic * in_strides[1]; + for (let d = 0; d < dims; d++) in_flat_idx += is_coords[d] * in_strides[d + 2]; + + let w_flat_idx = oc * w_strides[0] + ic_g * w_strides[1]; + for (let d = 0; d < dims; d++) w_flat_idx += ks_coords[d] * w_strides[d + 2]; + + sum += 
inputData[in_flat_idx] * weightData[w_flat_idx]; + } + } + } + + // Calculate output flattened index + let out_flat_idx = b * out_strides[0] + oc * out_strides[1]; + for (let d = 0; d < dims; d++) out_flat_idx += os_coords[d] * out_strides[d + 2]; + + output_data[out_flat_idx] = sum; + } + } + } + } + + return new Tensor(output_data, { requires_grad: false }, { shape: output_shape }); +} + +function _convNd_backward( + dz: Tensor, + input: Tensor, + weight: Tensor, + bias: Tensor | null, + stride: number | number[], + padding: number | number[], + dilation: number | number[], + groups: number, + dims: number, + input_requires_grad: boolean, + weight_requires_grad: boolean +): [Tensor | null, Tensor | null, Tensor | null] { + const stride_arr = typeof stride === 'number' ? new Array(dims).fill(stride) : stride; + const padding_arr = typeof padding === 'number' ? new Array(dims).fill(padding) : padding; + const dilation_arr = typeof dilation === 'number' ? new Array(dims).fill(dilation) : dilation; + + const batch_size = input.shape[0]; + const in_channels = input.shape[1]; + const out_channels = weight.shape[0]; + const in_dims = input.shape.slice(2); + const kernel_dims = weight.shape.slice(2); + const out_dims = dz.shape.slice(2); + + const get_strides = (shape: number[]) => { + const strides = new Array(shape.length); + let s = 1; + for (let i = shape.length - 1; i >= 0; i--) { + strides[i] = s; + s *= shape[i]; + } + return strides; + }; + + const in_strides = get_strides(input.shape); + const w_strides = get_strides(weight.shape); + const dz_strides = get_strides(dz.shape); + const dzData = dz.data; + const weightDataBwd = weight.data; + const inputDataBwd = input.data; + + let dInput: Tensor | null = null; + let dWeight: Tensor | null = null; + let dBias: Tensor | null = null; + + let dInput_data: number[] | null = null; + let dWeight_data: number[] | null = null; + + if (input_requires_grad) { + dInput_data = new Array(input.dataLength()).fill(0); + } + if 
(weight_requires_grad) { + dWeight_data = new Array(weight.dataLength()).fill(0); + } + + const in_channels_per_group = in_channels / groups; + const out_channels_per_group = out_channels / groups; + + for (let b = 0; b < batch_size; b++) { + for (let g = 0; g < groups; g++) { + for (let oc_g = 0; oc_g < out_channels_per_group; oc_g++) { + const oc = g * out_channels_per_group + oc_g; + + const out_spatial_size = out_dims.reduce((a, b) => a * b, 1); + for (let os_idx = 0; os_idx < out_spatial_size; os_idx++) { + + const os_coords = new Array(dims); + let temp_os = os_idx; + for (let d = dims - 1; d >= 0; d--) { + os_coords[d] = temp_os % out_dims[d]; + temp_os = Math.floor(temp_os / out_dims[d]); + } + + let dz_flat_idx = b * dz_strides[0] + oc * dz_strides[1]; + for (let d = 0; d < dims; d++) dz_flat_idx += os_coords[d] * dz_strides[d + 2]; + const dz_val = dzData[dz_flat_idx]; + + for (let ic_g = 0; ic_g < in_channels_per_group; ic_g++) { + const ic = g * in_channels_per_group + ic_g; + + const kernel_spatial_size = kernel_dims.reduce((a, b) => a * b, 1); + for (let ks_idx = 0; ks_idx < kernel_spatial_size; ks_idx++) { + const ks_coords = new Array(dims); + let temp_ks = ks_idx; + for (let d = dims - 1; d >= 0; d--) { + ks_coords[d] = temp_ks % kernel_dims[d]; + temp_ks = Math.floor(temp_ks / kernel_dims[d]); + } + + let is_valid = true; + const is_coords = new Array(dims); + for (let d = 0; d < dims; d++) { + const in_coord = os_coords[d] * stride_arr[d] + ks_coords[d] * dilation_arr[d] - padding_arr[d]; + if (in_coord < 0 || in_coord >= in_dims[d]) { + is_valid = false; + break; + } + is_coords[d] = in_coord; + } + + if (is_valid) { + let in_flat_idx = b * in_strides[0] + ic * in_strides[1]; + for (let d = 0; d < dims; d++) in_flat_idx += is_coords[d] * in_strides[d + 2]; + + let w_flat_idx = oc * w_strides[0] + ic_g * w_strides[1]; + for (let d = 0; d < dims; d++) w_flat_idx += ks_coords[d] * w_strides[d + 2]; + + if (input_requires_grad) { + 
dInput_data![in_flat_idx] += dz_val * weightDataBwd[w_flat_idx]; + } + if (weight_requires_grad) { + dWeight_data![w_flat_idx] += dz_val * inputDataBwd[in_flat_idx]; + } + } + } + } + } + } + } + } + + if (input_requires_grad) dInput = new Tensor(dInput_data!, { requires_grad: false }, { shape: input.shape }); + if (weight_requires_grad) dWeight = new Tensor(dWeight_data!, { requires_grad: false }, { shape: weight.shape }); + if (bias && bias.requires_grad) { + const sum_dims = [0]; + for (let d = 2; d < dz.shape.length; d++) sum_dims.push(d); + dBias = dz.sum(sum_dims); + } + + return [dInput, dWeight, dBias]; +} + +class Conv1dOp extends TorchFunction { + private stride: number | number[]; + private padding: number | number[]; + private dilation: number | number[]; + private groups: number; + + protected _forward( + input: Tensor, + weight: Tensor, + bias: Tensor | null, + stride: number | number[] = 1, + padding: number | number[] = 0, + dilation: number | number[] = 1, + groups: number = 1 + ): Tensor { + const rg = resultRequiresGrad(input, weight, ...(bias ? [bias] : [])); + if (rg) { + this.saved_tensors = [input, weight]; + if (bias) this.saved_tensors.push(bias); + } + this.next_functions.push(input.grad_fn ? input.grad_fn : nullOp); + this.next_functions.push(weight.grad_fn ? weight.grad_fn : nullOp); + if (bias) this.next_functions.push(bias.grad_fn ? bias.grad_fn : nullOp); + + this.stride = stride; + this.padding = padding; + this.dilation = dilation; + this.groups = groups; + + const res = _convNd_forward(input, weight, bias, stride, padding, dilation, groups, 1); + res.requires_grad = rg; + res.grad_fn = rg ? this : null; + return res; + } + + protected _backward(dz: Tensor): void { + const input = this.saved_tensors[0]; + const weight = this.saved_tensors[1]; + const bias = this.saved_tensors.length > 2 ? 
this.saved_tensors[2] : null; + const [inputFn, weightFn, biasFn] = this.next_functions; + + const [dInput, dWeight, dBias] = _convNd_backward( + dz, input, weight, bias, this.stride, this.padding, this.dilation, this.groups, 1, + input.requires_grad, weight.requires_grad + ); + + if (input.requires_grad) inputFn.backward(dInput); + if (weight.requires_grad) weightFn.backward(dWeight); + if (bias && bias.requires_grad) biasFn.backward(dBias); + } +} +registerOperation('conv1d', Conv1dOp); + +class Conv2dOp extends TorchFunction { + private stride: number | number[]; + private padding: number | number[]; + private dilation: number | number[]; + private groups: number; + + protected _forward( + input: Tensor, + weight: Tensor, + bias: Tensor | null, + stride: number | number[] = 1, + padding: number | number[] = 0, + dilation: number | number[] = 1, + groups: number = 1 + ): Tensor { + const rg = resultRequiresGrad(input, weight, ...(bias ? [bias] : [])); + if (rg) { + this.saved_tensors = [input, weight]; + if (bias) this.saved_tensors.push(bias); + } + this.next_functions.push(input.grad_fn ? input.grad_fn : nullOp); + this.next_functions.push(weight.grad_fn ? weight.grad_fn : nullOp); + if (bias) this.next_functions.push(bias.grad_fn ? bias.grad_fn : nullOp); + + this.stride = stride; + this.padding = padding; + this.dilation = dilation; + this.groups = groups; + + const res = _convNd_forward(input, weight, bias, stride, padding, dilation, groups, 2); + res.requires_grad = rg; + res.grad_fn = rg ? this : null; + return res; + } + + protected _backward(dz: Tensor): void { + const input = this.saved_tensors[0]; + const weight = this.saved_tensors[1]; + const bias = this.saved_tensors.length > 2 ? 
this.saved_tensors[2] : null; + const [inputFn, weightFn, biasFn] = this.next_functions; + + const [dInput, dWeight, dBias] = _convNd_backward( + dz, input, weight, bias, this.stride, this.padding, this.dilation, this.groups, 2, + input.requires_grad, weight.requires_grad + ); + + if (input.requires_grad) inputFn.backward(dInput); + if (weight.requires_grad) weightFn.backward(dWeight); + if (bias && bias.requires_grad) biasFn.backward(dBias); + } +} +registerOperation('conv2d', Conv2dOp); + +class Conv3dOp extends TorchFunction { + private stride: number | number[]; + private padding: number | number[]; + private dilation: number | number[]; + private groups: number; + + protected _forward( + input: Tensor, + weight: Tensor, + bias: Tensor | null, + stride: number | number[] = 1, + padding: number | number[] = 0, + dilation: number | number[] = 1, + groups: number = 1 + ): Tensor { + const rg = resultRequiresGrad(input, weight, ...(bias ? [bias] : [])); + if (rg) { + this.saved_tensors = [input, weight]; + if (bias) this.saved_tensors.push(bias); + } + this.next_functions.push(input.grad_fn ? input.grad_fn : nullOp); + this.next_functions.push(weight.grad_fn ? weight.grad_fn : nullOp); + if (bias) this.next_functions.push(bias.grad_fn ? bias.grad_fn : nullOp); + + this.stride = stride; + this.padding = padding; + this.dilation = dilation; + this.groups = groups; + + const res = _convNd_forward(input, weight, bias, stride, padding, dilation, groups, 3); + res.requires_grad = rg; + res.grad_fn = rg ? this : null; + return res; + } + + protected _backward(dz: Tensor): void { + const input = this.saved_tensors[0]; + const weight = this.saved_tensors[1]; + const bias = this.saved_tensors.length > 2 ? 
this.saved_tensors[2] : null; + const [inputFn, weightFn, biasFn] = this.next_functions; + + const [dInput, dWeight, dBias] = _convNd_backward( + dz, input, weight, bias, this.stride, this.padding, this.dilation, this.groups, 3, + input.requires_grad, weight.requires_grad + ); + + if (input.requires_grad) inputFn.backward(dInput); + if (weight.requires_grad) weightFn.backward(dWeight); + if (bias && bias.requires_grad) biasFn.backward(dBias); + } +} +registerOperation('conv3d', Conv3dOp); + + +// comparison + +const Lt = BinaryFunctionMixin( + (a: number[], b: number[], a_index: number, b_index: number) => (a[a_index] < b[b_index]) ? 1 : 0, + () => { }, + "lt" +); + +const Gt = BinaryFunctionMixin( + (a: number[], b: number[], a_index: number, b_index: number) => (a[a_index] > b[b_index]) ? 1 : 0, + () => { }, + "gt" +); + +const Le = BinaryFunctionMixin( + (a: number[], b: number[], a_index: number, b_index: number) => (a[a_index] <= b[b_index]) ? 1 : 0, + () => { }, + "le" +); + +const Ge = BinaryFunctionMixin( + (a: number[], b: number[], a_index: number, b_index: number) => (a[a_index] >= b[b_index]) ? 1 : 0, + () => { }, + "ge" +); + +const Eq = BinaryFunctionMixin( + (a: number[], b: number[], a_index: number, b_index: number) => (a[a_index] == b[b_index]) ? 1 : 0, + () => { }, + "eq" +); + +const Ne = BinaryFunctionMixin( + (a: number[], b: number[], a_index: number, b_index: number) => (a[a_index] != b[b_index]) ? 
1 : 0, + () => { }, + "ne" +); + +class Cat extends TorchFunction { + private _dim: number; + private _input_sizes: number[]; + + protected _forward(tensors: Tensor[], dim: number = 0): Tensor { + if (tensors.length === 0) { + throw new Error('torch.cat: expected a non-empty list of Tensors'); + } + + const ndim = tensors[0].shape.length; + if (ndim === 0) { + throw new Error('torch.cat: zero-dimensional tensors cannot be concatenated'); + } + + if (dim < 0) dim = dim + ndim; + if (dim < 0 || dim >= ndim) { + throw new Error(`torch.cat: dimension out of range`); + } + + for (let i = 1; i < tensors.length; i++) { + if (tensors[i].shape.length !== ndim) { + throw new Error('torch.cat: all tensors must have the same number of dimensions'); + } + for (let d = 0; d < ndim; d++) { + if (d !== dim && tensors[i].shape[d] !== tensors[0].shape[d]) { + throw new Error(`torch.cat: all tensors must have the same shape, except in dimension ${dim}`); + } + } + } + + const rg = resultRequiresGrad(...tensors); + if (rg) { + this.saved_tensors = [...tensors]; + this._dim = dim; + this._input_sizes = tensors.map(t => t.shape[dim]); + } + + for (const t of tensors) { + this.next_functions.push(t.grad_fn ? t.grad_fn : nullOp); + } + + const result_shape = [...tensors[0].shape]; + result_shape[dim] = tensors.reduce((sum, t) => sum + t.shape[dim], 0); + + const inner_size = result_shape.slice(dim + 1).reduce((a, b) => a * b, 1); + const outer_size = result_shape.slice(0, dim).reduce((a, b) => a * b, 1); + const total_size = result_shape.reduce((a, b) => a * b, 1); + + const data = new Array(total_size); + let out_idx = 0; + for (let o = 0; o < outer_size; o++) { + for (const t of tensors) { + const block_size = t.shape[dim] * inner_size; + const tData = t.data; + const src_base = o * block_size; + for (let i = 0; i < block_size; i++) { + data[out_idx++] = tData[src_base + i]; + } + } + } + + return new Tensor(data, { requires_grad: rg }, { operation: rg ? 
this : null, shape: result_shape }); + } + + protected _backward(dz: Tensor): void { + const tensors = this.saved_tensors; + const dim = this._dim; + const input_sizes = this._input_sizes; + + const inner_size = dz.shape.slice(dim + 1).reduce((a, b) => a * b, 1); + const outer_size = dz.shape.slice(0, dim).reduce((a, b) => a * b, 1); + const dzData = dz.data; + + const grad_datas: number[][] = tensors.map(t => new Array(t.dataLength())); + + let dz_idx = 0; + for (let o = 0; o < outer_size; o++) { + for (let i = 0; i < tensors.length; i++) { + const block_size = input_sizes[i] * inner_size; + const dst_base = o * block_size; + for (let j = 0; j < block_size; j++) { + grad_datas[i][dst_base + j] = dzData[dz_idx++]; + } + } + } + + for (let i = 0; i < tensors.length; i++) { + this.next_functions[i].backward( + new Tensor(grad_datas[i], {}, { shape: tensors[i].shape }) + ); + } + } +} +registerOperation('cat', Cat); + +// softmax + +class Softmax extends TorchFunction { + private dim: number; + + protected _forward(input: Tensor, dim: number): Tensor { + this.dim = dim; + const rg = resultRequiresGrad(input); + this.next_functions.push(input.grad_fn ? input.grad_fn : nullOp); + + const shape = input.shape; + const ndim = shape.length; + const d = dim < 0 ? 
dim + ndim : dim; + + const inputData = input.data; + const outputData = new Array(input.dataLength()); + + const outer = shape.slice(0, d).reduce((a, b) => a * b, 1); + const dimSize = shape[d]; + const inner = shape.slice(d + 1).reduce((a, b) => a * b, 1); + + for (let o = 0; o < outer; o++) { + for (let i = 0; i < inner; i++) { + let maxVal = -Infinity; + for (let k = 0; k < dimSize; k++) { + const idx = o * dimSize * inner + k * inner + i; + if (inputData[idx] > maxVal) maxVal = inputData[idx]; + } + let sumExp = 0; + for (let k = 0; k < dimSize; k++) { + const idx = o * dimSize * inner + k * inner + i; + sumExp += Math.exp(inputData[idx] - maxVal); + } + for (let k = 0; k < dimSize; k++) { + const idx = o * dimSize * inner + k * inner + i; + outputData[idx] = Math.exp(inputData[idx] - maxVal) / sumExp; + } + } + } + + const result = new Tensor(outputData, { requires_grad: rg }, { operation: rg ? this : null, shape: [...shape] }); + if (rg) { + this.saved_tensors = [result]; // save output for efficient backward + } + return result; + } + + protected _backward(dz: Tensor): void { + const [softmaxOut] = this.saved_tensors; + const [inputFn] = this.next_functions; + + const shape = softmaxOut.shape; + const ndim = shape.length; + const d = this.dim < 0 ? 
this.dim + ndim : this.dim; + + const sData = softmaxOut.data; + const dzData = dz.data; + + const outer = shape.slice(0, d).reduce((a, b) => a * b, 1); + const dimSize = shape[d]; + const inner = shape.slice(d + 1).reduce((a, b) => a * b, 1); + + const gradData = new Array(softmaxOut.dataLength()); + + for (let o = 0; o < outer; o++) { + for (let i = 0; i < inner; i++) { + // dot(dz, softmax) along this slice + let dotProd = 0; + for (let k = 0; k < dimSize; k++) { + const idx = o * dimSize * inner + k * inner + i; + dotProd += dzData[idx] * sData[idx]; + } + // grad = softmax * (dz - dot) + for (let k = 0; k < dimSize; k++) { + const idx = o * dimSize * inner + k * inner + i; + gradData[idx] = sData[idx] * (dzData[idx] - dotProd); + } + } + } + + inputFn.backward(new Tensor(gradData, {}, { shape: [...shape] })); + } +} +registerOperation('softmax', Softmax); + +// clamp + +class Clamp extends TorchFunction { + private min_val: number; + private max_val: number; + + protected _forward(input: Tensor, min_val: number, max_val: number): Tensor { + this.min_val = min_val; + this.max_val = max_val; + const rg = resultRequiresGrad(input); + if (rg) { + this.saved_tensors = [input]; + } + this.next_functions.push(input.grad_fn ? input.grad_fn : nullOp); + + const inputData = input.data; + const outputData = inputData.map(v => Math.min(Math.max(v, min_val), max_val)); + + return new Tensor(outputData, { requires_grad: rg }, { operation: rg ? this : null, shape: [...input.shape] }); + } + + protected _backward(dz: Tensor): void { + const [input] = this.saved_tensors; + const [inputFn] = this.next_functions; + const min_val = this.min_val; + const max_val = this.max_val; + + const inputData = input.data; + const dzData = dz.data; + const gradData = inputData.map((v, i) => + (v > min_val && v < max_val) ? 
dzData[i] : 0 + ); + + inputFn.backward(new Tensor(gradData, {}, { shape: [...input.shape] })); + } +} +registerOperation('clamp', Clamp); diff --git a/src/functions/registry.ts b/src/functions/registry.ts new file mode 100644 index 00000000..c4f82271 --- /dev/null +++ b/src/functions/registry.ts @@ -0,0 +1,41 @@ +import { TorchFunction, TorchFunctionConstructor } from './base'; + +// Only allow registering concrete, constructible Operation classes +const operations = new Map<string, TorchFunctionConstructor>(); +const operations_cache = new Map<string, TorchFunction>(); + +export function registerOperation(name: string, func: TorchFunctionConstructor) { + operations.set(name, func); +} + +function getOperation(name: string): TorchFunctionConstructor { + const func = operations.get(name); + if (!func) { + throw new Error(`Operation '${name}' is not registered.`); + } + return func; +} + +export function _getAllOperationNames(): Iterable<string> { + return operations.keys(); +} + +export function getOperationCache(name: string): TorchFunction { + const operation = operations_cache.get(name); + if (!operation) { + const op = new (getOperation(name))(); + op.opName = name; + operations_cache.set(name, op); + return op; + } + return operation; +} + +/** + * Create a new operation instance with its opName set.
+ */ +export function createOperation(name: string): TorchFunction { + const op = new (getOperation(name))(); + op.opName = name; + return op; +} diff --git a/src/functions/util.ts b/src/functions/util.ts new file mode 100644 index 00000000..23ea6a99 --- /dev/null +++ b/src/functions/util.ts @@ -0,0 +1,36 @@ +export function _get_strides(shape: number[]): number[] { + const strides = new Array(shape.length).fill(1); + for (let i = shape.length - 2; i >= 0; i--) { + strides[i] = strides[i + 1] * shape[i + 1]; + } + return strides; +} + +export function _unravel_index(index: number, strides: number[]): number[] { + return strides.map((stride) => { + const coord = Math.floor(index / stride); + index %= stride; + return coord; + }); +} + +export function _ravel_index(coords: number[], strides: number[]): number { + return coords.reduce((acc, coord, i) => acc + coord * strides[i], 0); +} + +export function _get_reduction_shape( + shape: number[], + dim?: number | number[], + keepdim: boolean = false +): number[] { + if (dim === undefined) return keepdim ? shape.map(() => 1) : []; + + const dims = Array.isArray(dim) ? dim : [dim]; + const normalized_dims = dims.map((d) => (d < 0 ? d + shape.length : d)); + + if (keepdim) { + return shape.map((s, i) => (normalized_dims.includes(i) ? 1 : s)); + } else { + return shape.filter((_, i) => !normalized_dims.includes(i)); + } +} diff --git a/src/grad_mode.ts b/src/grad_mode.ts new file mode 100644 index 00000000..3bf9b729 --- /dev/null +++ b/src/grad_mode.ts @@ -0,0 +1,42 @@ +/** + * Global gradient computation mode. + * + * When disabled, all operations behave as if no input requires grad, + * so no computation graph is built and no tensors are saved for backward. + */ +let _grad_enabled = true; + +export function is_grad_enabled(): boolean { + return _grad_enabled; +} + +/** + * Disable gradient computation. Returns the previous state + * so it can be restored later with `disable_no_grad(prev)`. 
+ */ +export function enable_no_grad(): boolean { + const prev = _grad_enabled; + _grad_enabled = false; + return prev; +} + +/** + * Restore gradient computation to a previous state + * (as returned by `enable_no_grad`). + */ +export function disable_no_grad(prev: boolean): void { + _grad_enabled = prev; +} + +/** + * Execute `fn` with gradient computation disabled. + * The previous grad mode is always restored, even if `fn` throws. + */ +export function no_grad<T>(fn: () => T): T { + const prev = enable_no_grad(); + try { + return fn(); + } finally { + disable_no_grad(prev); + } +} diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 00000000..4c1b70ef --- /dev/null +++ b/src/index.ts @@ -0,0 +1,38 @@ +export { Tensor, FloatTensor, LongTensor } from './tensor'; +export type { TypedArray, NestedNumberArray } from './tensor'; +export { no_grad, enable_no_grad, disable_no_grad, is_grad_enabled } from './grad_mode'; + +export { TorchFunction, AccumulateGrad } from './functions/base'; +export * from './functions/ops'; +export * from './functions/functional'; + +export * from './creation/index'; + +export * as nn from './nn/index'; + +// Allow for torch.relu -> torch.nn.functional.relu (and others) +export * from './nn/functional'; + +export * as optim from './optim/index'; + +export { seed, manual_seed } from './prng'; + +export { eventBus, events } from './util'; + +export { export_, ExportedProgram } from './export'; +export type { GraphNode, InputSpec, OutputSpec, GraphSignature } from './export'; + +import { Tensor } from './tensor'; + +export function is_tensor(obj: unknown): boolean { + return obj instanceof Tensor; +} + +export function is_nonzero(input: Tensor): boolean { + if (input.numel() !== 1) { + throw new Error( + `Boolean value of Tensor with more than one element is ambiguous` + ); + } + return input.item() !== 0; +} diff --git a/src/nn/base.ts b/src/nn/base.ts new file mode 100644 index 00000000..c2babbb7 --- /dev/null +++
b/src/nn/base.ts @@ -0,0 +1,116 @@ +import { Tensor } from '../tensor'; +import { Parameter } from './parameter'; + +export { Parameter } from './parameter'; + +export abstract class Module { + private _modules: { [key: string]: Module }; + private _parameters: { [key: string]: Parameter }; + public training: boolean = true; + + constructor() { + this._parameters = {}; + this._modules = {}; + } + + private register_parameter(parameter_name: string, parameter: Parameter) { + this._parameters[parameter_name] = parameter; + } + + private register_module(module_name: string, module: Module) { + this._modules[module_name] = module; + } + + protected register(name: string, value: Parameter | Module) { + if (value instanceof Parameter) { + this.register_parameter(name, value); + } else { + this.register_module(name, value); + } + } + + public abstract forward(...args: Tensor[]): Tensor; + + /** + * Entry point for running the module. Equivalent to `model(x)` in Python. + * In the future, this is where forward hooks will be triggered. + * Call `forward()` directly to bypass hooks. + */ + public call(...args: Tensor[]): Tensor { + return this.forward(...args); + } + + public train(mode: boolean = true): this { + this.training = mode; + for (const module of Object.values(this._modules)) { + module.train(mode); + } + return this; + } + + public eval(): this { + return this.train(false); + } + + public parameters(): Parameter[] { + let params: Parameter[] = Object.values(this._parameters); + for (const module of Object.values(this._modules)) { + params = params.concat(module.parameters()); + } + return params; + } + + public named_parameters(prefix: string = ''): [string, Parameter][] { + const result: [string, Parameter][] = []; + for (const [name, param] of Object.entries(this._parameters)) { + const fullName = prefix ? `${prefix}.${name}` : name; + result.push([fullName, param]); + } + for (const [name, module] of Object.entries(this._modules)) { + const fullName = prefix ? 
`${prefix}.${name}` : name; + result.push(...module.named_parameters(fullName)); + } + return result; + } +} + +export class Sequential extends Module { + private _modulesArr: Module[]; + + constructor(...modules: Module[]) { + super(); + this._modulesArr = modules; + for (let i = 0; i < modules.length; i++) { + this.register(i.toString(), modules[i]); + } + } + + append(module: Module): this { + this.register(this._modulesArr.length.toString(), module); + this._modulesArr.push(module); + return this; + } + + extend(sequential: Sequential): this { + for (const module of sequential._modulesArr) { + this.append(module); + } + return this; + } + + insert(index: number, module: Module): this { + this._modulesArr.splice(index, 0, module); + for (let i = index; i < this._modulesArr.length; i++) { + this.register(i.toString(), this._modulesArr[i]); + } + return this; + } + + forward(input: Tensor) { + let x = input; + for (const module of this._modulesArr) { + x = module.call(x); + } + return x; + } +} diff --git a/src/nn/functional.ts b/src/nn/functional.ts new file mode 100644 index 00000000..fe450a78 --- /dev/null +++ b/src/nn/functional.ts @@ -0,0 +1,34 @@ +import { Tensor } from '../tensor'; +import { createOperation } from '../functions/registry'; +import { ArgumentType } from '../functions/base'; + +function generate_function(opname: string) { + return (...args: ArgumentType[]) => { + const operation = createOperation(opname); + return operation.forward(...args); + }; +} + +function generate_unary_function(opname: string) { + return (a: Tensor | number) => { + if (typeof a == 'number') { + a = new Tensor(a); + } + + const operation = createOperation(opname); + return operation.forward(a); + }; +} + +export const relu = generate_unary_function('relu'); +export const sigmoid = generate_unary_function('sigmoid'); +export const tanh = generate_unary_function('tanh'); + +export const leaky_relu = generate_function('leaky_relu'); + +export const conv1d = 
generate_function('conv1d'); +export const conv2d = generate_function('conv2d'); +export const conv3d = generate_function('conv3d'); +export const cross_entropy = generate_function('cross_entropy_loss'); +export const nll_loss = generate_function('nll_loss'); +export const max_pool2d = generate_function('max_pool2d'); diff --git a/src/nn/index.ts b/src/nn/index.ts new file mode 100644 index 00000000..9e30122a --- /dev/null +++ b/src/nn/index.ts @@ -0,0 +1,7 @@ +import './ops'; + +export * from './base'; +export * from './loss'; +export * from './module'; +export * as functional from './functional'; +export * as parameter from './parameter'; diff --git a/src/nn/loss.ts b/src/nn/loss.ts new file mode 100644 index 00000000..8381df90 --- /dev/null +++ b/src/nn/loss.ts @@ -0,0 +1,92 @@ +import { Tensor } from "../tensor"; +import { Module } from "./base"; +import { createOperation } from "../functions/registry"; + +export type Reduction = 'mean' | 'sum' | 'none'; + +function applyReduction(loss: Tensor, reduction: Reduction): Tensor { + if (reduction === 'mean') return loss.mean(); + if (reduction === 'sum') return loss.sum(); + return loss; +} + +abstract class Loss extends Module { + abstract forward(input: Tensor, target: Tensor): Tensor; +} + +export class MSELoss extends Loss { + private reduction: Reduction; + + constructor(reduction: Reduction = 'mean') { + super(); + this.reduction = reduction; + } + + forward(input: Tensor, target: Tensor) { + const unreduced = input.sub(target).pow(2); + return applyReduction(unreduced, this.reduction); + } +} + +export class L1Loss extends Loss { + private reduction: Reduction; + + constructor(reduction: Reduction = 'mean') { + super(); + this.reduction = reduction; + } + + forward(input: Tensor, target: Tensor) { + const unreduced = input.sub(target).abs(); + return applyReduction(unreduced, this.reduction); + } +} + +export class BCELoss extends Loss { + private weight: Tensor | null; + private reduction: Reduction; + + 
constructor(weight: Tensor | null = null, reduction: Reduction = 'mean') { + super(); + this.weight = weight; + this.reduction = reduction; + } + + forward(input: Tensor, target: Tensor) { + const left = target.mul(input.log()); + const right = target.neg().add(1).mul(input.neg().add(1).log()); + let unreduced = left.add(right).neg(); + if (this.weight) { + unreduced = unreduced.mul(this.weight); + } + return applyReduction(unreduced, this.reduction); + } +} + +export class CrossEntropyLoss extends Loss { + private reduction: Reduction; + + constructor(reduction: Reduction = 'mean') { + super(); + this.reduction = reduction; + } + + forward(input: Tensor, target: Tensor) { + const op = createOperation('cross_entropy_loss'); + return op.forward(input, target, this.reduction); + } +} + +export class NLLLoss extends Loss { + private reduction: Reduction; + + constructor(reduction: Reduction = 'mean') { + super(); + this.reduction = reduction; + } + + forward(input: Tensor, target: Tensor) { + const op = createOperation('nll_loss'); + return op.forward(input, target, this.reduction); + } +} diff --git a/src/nn/module.ts b/src/nn/module.ts new file mode 100644 index 00000000..81ca53f5 --- /dev/null +++ b/src/nn/module.ts @@ -0,0 +1,304 @@ +import { Module, Parameter } from './base'; +import { rand } from '../creation'; +import * as functional from './functional'; +import { softmax as _softmax } from '../functions/functional'; +import { Tensor } from '../tensor'; +import { getRng } from '../prng'; + +export class Linear extends Module { + public weight: Parameter; + public bias: Parameter | null; + + constructor(in_features: number, out_features: number, bias: boolean = true) { + super(); + const k = Math.sqrt(1 / in_features); + + this.weight = new Parameter( + rand([out_features, in_features]) + .mul(2 * k) + .sub(k) + ); + this.register('weight', this.weight); + + if (bias) { + this.bias = new Parameter( + rand([out_features]) + .mul(2 * k) + .sub(k) + ); + 
this.register('bias', this.bias); + } else { + this.bias = null; + } + } + + forward(input: Tensor) { + const out = input.matmul(this.weight.transpose(0, 1)); + return this.bias ? out.add(this.bias) : out; + } +} + +export class ReLU extends Module { + constructor() { + super(); + } + + forward(input: Tensor) { + return functional.relu(input); + } +} + +export class Sigmoid extends Module { + constructor() { + super(); + } + + forward(input: Tensor) { + return functional.sigmoid(input); + } +} + +export class Tanh extends Module { + constructor() { + super(); + } + + forward(input: Tensor) { + return functional.tanh(input); + } +} + +abstract class _ConvNd extends Module { + public weight: Parameter; + public bias: Parameter | null; + + public in_channels: number; + public out_channels: number; + public kernel_size: number | number[]; + public stride: number | number[]; + public padding: number | number[]; + public dilation: number | number[]; + public groups: number; + + constructor( + in_channels: number, + out_channels: number, + kernel_size: number | number[], + stride: number | number[], + padding: number | number[], + dilation: number | number[], + groups: number, + bias: boolean, + dims: number + ) { + super(); + + this.in_channels = in_channels; + this.out_channels = out_channels; + this.kernel_size = kernel_size; + this.stride = stride; + this.padding = padding; + this.dilation = dilation; + this.groups = groups; + + if (in_channels % groups !== 0) { + throw new Error('in_channels must be divisible by groups'); + } + if (out_channels % groups !== 0) { + throw new Error('out_channels must be divisible by groups'); + } + + const kernel_arr = + typeof kernel_size === 'number' ? 
new Array(dims).fill(kernel_size) : kernel_size; + const kernel_vol = kernel_arr.reduce((a: number, b: number) => a * b, 1); + + const k = Math.sqrt(groups / (in_channels * kernel_vol)); + + this.weight = new Parameter( + rand([out_channels, in_channels / groups, ...kernel_arr]) + .mul(2 * k) + .sub(k) + ); + + this.register('weight', this.weight); + + if (bias) { + this.bias = new Parameter( + rand([out_channels]) + .mul(2 * k) + .sub(k) + ); + this.register('bias', this.bias); + } else { + this.bias = null; + } + } + + abstract forward(input: Tensor): Tensor; +} + +export class Conv1d extends _ConvNd { + constructor( + in_channels: number, + out_channels: number, + kernel_size: number | number[], + stride: number | number[] = 1, + padding: number | number[] = 0, + dilation: number | number[] = 1, + groups: number = 1, + bias: boolean = true + ) { + super(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias, 1); + } + + forward(input: Tensor) { + return functional.conv1d( + input, + this.weight, + this.bias, + this.stride, + this.padding, + this.dilation, + this.groups + ); + } +} + +export class Conv2d extends _ConvNd { + constructor( + in_channels: number, + out_channels: number, + kernel_size: number | number[], + stride: number | number[] = 1, + padding: number | number[] = 0, + dilation: number | number[] = 1, + groups: number = 1, + bias: boolean = true + ) { + super(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias, 2); + } + + forward(input: Tensor) { + return functional.conv2d( + input, + this.weight, + this.bias, + this.stride, + this.padding, + this.dilation, + this.groups + ); + } +} + +export class LeakyReLU extends Module { + private negative_slope: number; + + constructor(negative_slope: number = 0.01) { + super(); + this.negative_slope = negative_slope; + } + + forward(input: Tensor) { + return functional.leaky_relu(input, this.negative_slope); + } +} + +export class MaxPool2d extends Module 
{ + private kernel_size: number | number[]; + private stride: number | number[] | undefined; + private padding: number | number[]; + + constructor( + kernel_size: number | number[], + stride?: number | number[], + padding: number | number[] = 0 + ) { + super(); + this.kernel_size = kernel_size; + this.stride = stride; + this.padding = padding; + } + + forward(input: Tensor) { + return functional.max_pool2d(input, this.kernel_size, this.stride, this.padding); + } +} + +export class Dropout extends Module { + private p: number; + + constructor(p: number = 0.5) { + super(); + this.p = p; + } + + forward(input: Tensor) { + if (!this.training || this.p === 0) { + return input; + } + if (this.p === 1) { + return input.mul(0); + } + const scale = 1 / (1 - this.p); + const prng = getRng(); + const maskData = input.toFlatArray().map(() => (prng() > this.p ? scale : 0)); + const mask = new Tensor(maskData, {}, { shape: [...input.shape] }); + return input.mul(mask); + } +} + +export class Softmax extends Module { + private dim: number; + + constructor(dim: number) { + super(); + this.dim = dim; + } + + forward(input: Tensor) { + return _softmax(input, this.dim); + } +} + +export class Flatten extends Module { + private start_dim: number; + private end_dim: number; + + constructor(start_dim: number = 1, end_dim: number = -1) { + super(); + this.start_dim = start_dim; + this.end_dim = end_dim; + } + + forward(input: Tensor) { + return input.flatten(this.start_dim, this.end_dim); + } +} + +export class Conv3d extends _ConvNd { + constructor( + in_channels: number, + out_channels: number, + kernel_size: number | number[], + stride: number | number[] = 1, + padding: number | number[] = 0, + dilation: number | number[] = 1, + groups: number = 1, + bias: boolean = true + ) { + super(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias, 3); + } + + forward(input: Tensor) { + return functional.conv3d( + input, + this.weight, + this.bias, + this.stride, + 
this.padding, + this.dilation, + this.groups + ); + } +} diff --git a/src/nn/ops.ts b/src/nn/ops.ts new file mode 100644 index 00000000..ae864471 --- /dev/null +++ b/src/nn/ops.ts @@ -0,0 +1,362 @@ +import { UnaryFunctionMixin } from '../functions/mixin'; +import { TorchFunction, resultRequiresGrad, nullOp } from '../functions/base'; +import { registerOperation } from '../functions/registry'; +import { Tensor } from '../tensor'; + +const Relu = UnaryFunctionMixin( + (a: number[], x: number) => Math.max(a[x], 0), + (a, aFn, dz) => { + aFn.backward(dz.mul(a.gt(0))); + }, + "relu" +); + +const Sigmoid = UnaryFunctionMixin( + (a: number[], x: number) => 1 / (1 + Math.exp(-a[x])), + (a, aFn, dz) => { + const res = a.sigmoid(); + aFn.backward(res.mul(res.mul(-1).add(1)).mul(dz)); + }, + "sigmoid" +); + +const Tanh = UnaryFunctionMixin( + (a: number[], x: number) => Math.tanh(a[x]), + (a, aFn, dz) => { + const tanh_a = a.tanh(); + aFn.backward(dz.mul(tanh_a.mul(tanh_a).mul(-1).add(1))); + }, + "tanh" +); + +/** + * CrossEntropyLoss operation. + * + * Forward: + * input – (N, C) logits (unnormalized scores) + * target – (N,) integer class indices in [0, C) + * reduction – 'mean' | 'sum' | 'none' + * + * Backward: + * d_input[i,j] = (softmax(input)[i,j] - 1{j == target[i]}) * scale + */ +class CrossEntropyLossOp extends TorchFunction { + private N: number = 0; + private C: number = 0; + private reduction: string = 'mean'; + + protected _forward(input: Tensor, target: Tensor, reduction: string = 'mean'): Tensor { + this.reduction = reduction; + const rg = resultRequiresGrad(input); + if (rg) { + this.saved_tensors = [input, target]; + } + this.next_functions.push(input.grad_fn ? 
input.grad_fn : nullOp); + + const shape = input.shape; // (N, C) + const N = shape[0]; + const C = shape[1]; + this.N = N; + this.C = C; + + const inputData = input.data; + const targetData = target.data; + + // Numerically stable log-softmax + gather + const perSampleLoss = new Array(N); + for (let i = 0; i < N; i++) { + const rowOffset = i * C; + + // Find max for numerical stability + let maxVal = -Infinity; + for (let j = 0; j < C; j++) { + if (inputData[rowOffset + j] > maxVal) { + maxVal = inputData[rowOffset + j]; + } + } + + // Compute log(sum(exp(x - max))) + let sumExp = 0; + for (let j = 0; j < C; j++) { + sumExp += Math.exp(inputData[rowOffset + j] - maxVal); + } + const logSumExp = Math.log(sumExp); + + // log_softmax for the target class + const t = targetData[i]; + const logSoftmax = inputData[rowOffset + t] - maxVal - logSumExp; + perSampleLoss[i] = -logSoftmax; + } + + let lossData: number[]; + let resultShape: number[]; + if (reduction === 'none') { + lossData = perSampleLoss; + resultShape = [N]; + } else if (reduction === 'sum') { + lossData = [perSampleLoss.reduce((a: number, b: number) => a + b, 0)]; + resultShape = []; + } else { + lossData = [perSampleLoss.reduce((a: number, b: number) => a + b, 0) / N]; + resultShape = []; + } + + const result = new Tensor(lossData, { requires_grad: rg }, { operation: rg ? this : null, shape: resultShape }); + return result; + } + + protected _backward(dz: Tensor | number): void { + const [input, target] = this.saved_tensors; + const [inputFn] = this.next_functions; + const N = this.N; + const C = this.C; + const reduction = this.reduction; + + const inputData = input.data; + const targetData = target.data; + + let dzData: number[]; + if (typeof dz === 'number') { + dzData = new Array(reduction === 'none' ? 
N : 1).fill(dz); + } else { + dzData = [...dz.data]; + } + + const grad = new Array(N * C); + for (let i = 0; i < N; i++) { + const rowOffset = i * C; + + // Compute softmax for this row + let maxVal = -Infinity; + for (let j = 0; j < C; j++) { + if (inputData[rowOffset + j] > maxVal) { + maxVal = inputData[rowOffset + j]; + } + } + let sumExp = 0; + for (let j = 0; j < C; j++) { + sumExp += Math.exp(inputData[rowOffset + j] - maxVal); + } + + const t = targetData[i]; + const dzVal = reduction === 'none' ? dzData[i] : dzData[0]; + const scale = reduction === 'mean' ? dzVal / N : dzVal; + + for (let j = 0; j < C; j++) { + const softmax_j = Math.exp(inputData[rowOffset + j] - maxVal) / sumExp; + const oneHot = j === t ? 1 : 0; + grad[rowOffset + j] = (softmax_j - oneHot) * scale; + } + } + + const gradTensor = new Tensor(grad, {}, { shape: [N, C] }); + inputFn.backward(gradTensor); + } +} + +registerOperation('cross_entropy_loss', CrossEntropyLossOp); + +class LeakyReluOp extends TorchFunction { + private negative_slope: number; + + protected _forward(input: Tensor, negative_slope: number = 0.01): Tensor { + this.negative_slope = negative_slope; + const rg = resultRequiresGrad(input); + if (rg) { + this.saved_tensors = [input]; + } + this.next_functions.push(input.grad_fn ? input.grad_fn : nullOp); + + const inputData = input.data; + const outputData = inputData.map(v => v > 0 ? v : negative_slope * v); + + return new Tensor(outputData, { requires_grad: rg }, { operation: rg ? this : null, shape: [...input.shape] }); + } + + protected _backward(dz: Tensor): void { + const [input] = this.saved_tensors; + const [inputFn] = this.next_functions; + const ns = this.negative_slope; + + const inputData = input.data; + const dzData = dz.data; + const gradData = inputData.map((v, i) => v > 0 ? 
dzData[i] : ns * dzData[i]); + + inputFn.backward(new Tensor(gradData, {}, { shape: [...input.shape] })); + } +} +registerOperation('leaky_relu', LeakyReluOp); + +class MaxPool2dOp extends TorchFunction { + private pool_h: number; + private pool_w: number; + private stride_h: number; + private stride_w: number; + private pad_h: number; + private pad_w: number; + private argmax_data: number[]; + private input_ndim: number; + + protected _forward( + input: Tensor, + kernel_size: number | number[], + stride?: number | number[], + padding: number | number[] = 0 + ): Tensor { + const kArr = typeof kernel_size === 'number' ? [kernel_size, kernel_size] : kernel_size as number[]; + const sArr = stride === undefined ? kArr : (typeof stride === 'number' ? [stride, stride] : stride as number[]); + const pArr = typeof padding === 'number' ? [padding, padding] : padding as number[]; + + this.pool_h = kArr[0]; + this.pool_w = kArr[1]; + this.stride_h = sArr[0]; + this.stride_w = sArr[1]; + this.pad_h = pArr[0]; + this.pad_w = pArr[1]; + this.input_ndim = input.shape.length; + + const is4d = input.shape.length === 4; + let N: number, C: number, H: number, W: number; + if (is4d) { + [N, C, H, W] = input.shape; + } else { + N = 1; + [C, H, W] = input.shape; + } + + const H_out = Math.floor((H + 2 * this.pad_h - this.pool_h) / this.stride_h) + 1; + const W_out = Math.floor((W + 2 * this.pad_w - this.pool_w) / this.stride_w) + 1; + + const rg = resultRequiresGrad(input); + if (rg) { + this.saved_tensors = [input]; + } + this.next_functions.push(input.grad_fn ? 
input.grad_fn : nullOp); + + const inputData = input.data; + const totalOut = N * C * H_out * W_out; + const outputData = new Array(totalOut); + const argmax = new Array(totalOut); + + for (let n = 0; n < N; n++) { + for (let c = 0; c < C; c++) { + for (let oh = 0; oh < H_out; oh++) { + for (let ow = 0; ow < W_out; ow++) { + let maxVal = -Infinity; + let maxIdx = -1; + for (let kh = 0; kh < this.pool_h; kh++) { + for (let kw = 0; kw < this.pool_w; kw++) { + const ih = oh * this.stride_h - this.pad_h + kh; + const iw = ow * this.stride_w - this.pad_w + kw; + if (ih >= 0 && ih < H && iw >= 0 && iw < W) { + const inputIdx = ((n * C + c) * H + ih) * W + iw; + if (inputData[inputIdx] > maxVal) { + maxVal = inputData[inputIdx]; + maxIdx = inputIdx; + } + } + } + } + const outIdx = ((n * C + c) * H_out + oh) * W_out + ow; + outputData[outIdx] = maxVal; + argmax[outIdx] = maxIdx; + } + } + } + } + + this.argmax_data = argmax; + const outShape = is4d ? [N, C, H_out, W_out] : [C, H_out, W_out]; + + return new Tensor(outputData, { requires_grad: rg }, { operation: rg ? this : null, shape: outShape }); + } + + protected _backward(dz: Tensor): void { + const [input] = this.saved_tensors; + const [inputFn] = this.next_functions; + + const gradData = new Array(input.dataLength()).fill(0); + const dzData = dz.data; + + for (let i = 0; i < dzData.length; i++) { + if (this.argmax_data[i] >= 0) { + gradData[this.argmax_data[i]] += dzData[i]; + } + } + + inputFn.backward(new Tensor(gradData, {}, { shape: [...input.shape] })); + } +} +registerOperation('max_pool2d', MaxPool2dOp); + +class NLLLossOp extends TorchFunction { + private N: number; + private C: number; + private reduction: string; + + protected _forward(input: Tensor, target: Tensor, reduction: string = 'mean'): Tensor { + this.reduction = reduction; + const rg = resultRequiresGrad(input); + if (rg) { + this.saved_tensors = [input, target]; + } + this.next_functions.push(input.grad_fn ? 
input.grad_fn : nullOp); + + const N = input.shape[0]; + const C = input.shape[1]; + this.N = N; + this.C = C; + + const inputData = input.data; + const targetData = target.data; + + const perSampleLoss = new Array(N); + for (let i = 0; i < N; i++) { + const t = Math.round(targetData[i]); + perSampleLoss[i] = -inputData[i * C + t]; + } + + let lossData: number[]; + let resultShape: number[]; + if (reduction === 'none') { + lossData = perSampleLoss; + resultShape = [N]; + } else if (reduction === 'sum') { + lossData = [perSampleLoss.reduce((a, b) => a + b, 0)]; + resultShape = []; + } else { + lossData = [perSampleLoss.reduce((a, b) => a + b, 0) / N]; + resultShape = []; + } + + return new Tensor(lossData, { requires_grad: rg }, { operation: rg ? this : null, shape: resultShape }); + } + + protected _backward(dz: Tensor | number): void { + const [input, target] = this.saved_tensors; + const [inputFn] = this.next_functions; + const N = this.N; + const C = this.C; + const reduction = this.reduction; + + const targetData = target.data; + + let dzData: number[]; + if (typeof dz === 'number') { + dzData = new Array(reduction === 'none' ? N : 1).fill(dz); + } else { + dzData = [...dz.data]; + } + + const gradData = new Array(N * C).fill(0); + for (let i = 0; i < N; i++) { + const t = Math.round(targetData[i]); + const dzVal = reduction === 'none' ? dzData[i] : dzData[0]; + const scale = reduction === 'mean' ? 
dzVal / N : dzVal; + gradData[i * C + t] = -scale; + } + + inputFn.backward(new Tensor(gradData, {}, { shape: [N, C] })); + } +} +registerOperation('nll_loss', NLLLossOp); diff --git a/src/nn/parameter.ts b/src/nn/parameter.ts new file mode 100644 index 00000000..9d8791b0 --- /dev/null +++ b/src/nn/parameter.ts @@ -0,0 +1,20 @@ +import { Tensor } from '../tensor'; +import { NestedNumberArray } from '../tensor'; +import { TorchFunction } from '../functions/base'; + +export class Parameter extends Tensor { + constructor( + data: NestedNumberArray | Tensor | Parameter, + // Default to requires_grad=true + options: { requires_grad?: boolean } = { + requires_grad: true + }, + internal_options: { operation?: TorchFunction; shape?: number[] } = {} + ) { + if (data instanceof Tensor) { + super(data.data, { requires_grad: options.requires_grad }, { shape: data.shape }); + } else { + super(data, options, internal_options); + } + } +} diff --git a/src/optim/base.ts b/src/optim/base.ts new file mode 100644 index 00000000..99684b06 --- /dev/null +++ b/src/optim/base.ts @@ -0,0 +1,19 @@ +import { Parameter } from "../nn/base"; + +export abstract class Optimizer { + params: Parameter[]; + defaults: { [key: string]: any }; + + constructor(params: Parameter[], defaults: { [key: string]: any }) { + this.params = params; + this.defaults = defaults; + } + + public zero_grad(): void { + for (const param of this.params) { + param.grad = null; + } + } + + abstract step(): void; +} diff --git a/src/optim/index.ts b/src/optim/index.ts new file mode 100644 index 00000000..3f38deda --- /dev/null +++ b/src/optim/index.ts @@ -0,0 +1,2 @@ +export * from './base'; +export * from './optimizers'; diff --git a/src/optim/optimizers.ts b/src/optim/optimizers.ts new file mode 100644 index 00000000..42381194 --- /dev/null +++ b/src/optim/optimizers.ts @@ -0,0 +1,196 @@ +import { Optimizer } from './base'; +import { Parameter } from '../nn/base'; +import { Tensor } from '../tensor'; +import { zeros_like 
} from '../creation'; + +export class Adagrad extends Optimizer { + private state: Map = new Map(); + private lr: number; + private lr_decay: number; + private weight_decay: number; + private eps: number; + private step_count: number = 0; + + constructor( + params: Parameter[], + lr: number = 0.01, + lr_decay: number = 0, + weight_decay: number = 0, + eps: number = 1e-10 + ) { + super(params, {}); + this.lr = lr; + this.lr_decay = lr_decay; + this.weight_decay = weight_decay; + this.eps = eps; + } + + step(): void { + this.step_count += 1; + const clr = this.lr / (1 + (this.step_count - 1) * this.lr_decay); + + for (const param of this.params) { + let grad = param.grad; + if (grad === null) continue; + + if (this.weight_decay !== 0) { + grad = grad.add(param.mul(this.weight_decay)); + } + + if (!this.state.has(param)) { + this.state.set(param, { sum: zeros_like(param) }); + } + + const state = this.state.get(param)!; + state.sum = state.sum.add(grad.mul(grad)); + + const update = grad.div(state.sum.sqrt().add(this.eps)).mul(clr); + param.data = param.sub(update).data; + } + } +} + +export class SGD extends Optimizer { + private state: Map = new Map(); + private lr: number; + private momentum: number; + private dampening: number; + private weight_decay: number; + private nesterov: boolean; + private maximize: boolean; + + constructor( + params: Parameter[], + lr: number = 0.001, + momentum: number = 0.0, + dampening: number = 0.0, + weight_decay: number = 0.0, + nesterov: boolean = false, + maximize: boolean = false + ) { + super(params, {}); + this.lr = lr; + this.momentum = momentum; + this.dampening = dampening; + this.weight_decay = weight_decay; + this.nesterov = nesterov; + this.maximize = maximize; + } + + step(): void { + for (const param of this.params) { + if (param.grad === null) continue; + let g = this.maximize ? 
param.grad.mul(-1) : param.grad; + if (this.weight_decay !== 0) { + g = g.add(param.mul(this.weight_decay)); + } + + if (this.momentum !== 0) { + if (this.state.has(param)) { + let buf = this.state.get(param)!.velocity; + buf = buf.mul(this.momentum); + buf = buf.add(g.mul(1 - this.dampening)); + this.state.set(param, { velocity: buf }); + } else { + this.state.set(param, { velocity: g }); + } + + const buf = this.state.get(param)!.velocity; + + if (this.nesterov) { + g = g.add(buf.mul(this.momentum)); + } else { + g = buf; + } + + this.state.set(param, { velocity: buf }); + } + + // potentially unsafe? + const newParam = param.sub(g.mul(this.lr)); + param.data = newParam.data; + } + } +} + +export class Adam extends Optimizer { + private state: Map< + Parameter, + { + m: Tensor; + v: Tensor; + vmax: Tensor; + } + > = new Map(); + + private step_count: number = 0; + private lr: number; + private beta1: number; + private beta2: number; + private eps: number; + private weight_decay: number; + private amsgrad: boolean; + private maximize: boolean; + + constructor( + params: Parameter[], + lr: number = 0.001, + betas: [number, number] = [0.9, 0.999], + eps: number = 1e-8, + weight_decay: number = 0.0, + amsgrad: boolean = false, + maximize: boolean = false + ) { + super(params, {}); + this.lr = lr; + this.beta1 = betas[0]; + this.beta2 = betas[1]; + this.eps = eps; + this.weight_decay = weight_decay; + this.amsgrad = amsgrad; + this.maximize = maximize; + } + + step(): void { + this.step_count += 1; + for (const param of this.params) { + if (param.grad === null) continue; + let grad = this.maximize ? 
param.grad.mul(-1) : param.grad; + + if (this.weight_decay !== 0) { + grad = grad.add(param.mul(this.weight_decay)); + } + + // Initialize + if (!this.state.has(param)) { + this.state.set(param, { + m: zeros_like(param), + v: zeros_like(param), + vmax: zeros_like(param) + }); + } + + const state = this.state.get(param)!; + + state.m = state.m.mul(this.beta1).add(grad.mul(1 - this.beta1)); + state.v = state.v.mul(this.beta2).add(grad.mul(grad).mul(1 - this.beta2)); + + const biasCorrection1 = 1 - Math.pow(this.beta1, this.step_count); + const biasCorrection2 = 1 - Math.pow(this.beta2, this.step_count); + + let vhat: Tensor; + const mhat = state.m.div(biasCorrection1); + if (this.amsgrad) { + state.vmax = state.vmax.maximum(state.v); + vhat = state.vmax.div(biasCorrection2); + } else { + vhat = state.v.div(biasCorrection2); + } + + const update = mhat.div(vhat.sqrt().add(this.eps)).mul(this.lr); + + const newParam = param.sub(update); + param.data = newParam.data; + } + } +} diff --git a/src/prng.ts b/src/prng.ts new file mode 100644 index 00000000..89cd18c0 --- /dev/null +++ b/src/prng.ts @@ -0,0 +1,42 @@ +// unseeded by default +let _rng = () => Math.random(); + +export function getRng(): () => number { + return _rng; +} + +export function manual_seed(seed: number): number { + seed = seed >>> 0; // to uint32 + _rng = mulberry32(seed); + return seed; +} + +export function seed(): number { + const s = (Math.random() * 0xffffffff) >>> 0; + _rng = mulberry32(s); + return s; +} + +// https://stackoverflow.com/a/47593316 +function mulberry32(seed: number): () => number { + return function () { + let t = (seed += 0x6d2b79f5); + t = Math.imul(t ^ (t >>> 15), t | 1); + t ^= t + Math.imul(t ^ (t >>> 7), t | 61); + return ((t ^ (t >>> 14)) >>> 0) / 4294967296; + }; +} + +export function uniformDist(min = 0, max = 1) { + return () => min + getRng()() * (max - min); +} + +// https://stackoverflow.com/a/36481059 +export function normalDist(mean = 0, std = 1) { + return function 
() { + const u = 1 - getRng()(); // [0,1) -> (0,1] + const v = getRng()(); + const z = Math.sqrt(-2.0 * Math.log(u)) * Math.cos(2.0 * Math.PI * v); + return z * std + mean; + }; +} diff --git a/src/tensor.ts b/src/tensor.ts new file mode 100644 index 00000000..9260f1b5 --- /dev/null +++ b/src/tensor.ts @@ -0,0 +1,625 @@ +import { AccumulateGrad, TorchFunction, resultRequiresGrad } from './functions/base'; +import { getOperationCache, createOperation } from './functions/registry'; +import { getNextId, eventBus, events } from './util'; + +export type TypedArray = + | Int8Array + | Uint8Array + | Uint8ClampedArray + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array; + +export type NestedNumberArray = number | TypedArray | NestedNumberArray[]; + +function _get_shape(data: NestedNumberArray): number[] { + if (ArrayBuffer.isView(data)) { + return [data.length]; + } + + const shape = []; + while (Array.isArray(data)) { + shape.push(data.length); + data = data[0]; + } + return shape; +} + +function _assert_shape(data: NestedNumberArray, shape: number[]): void { + if (Array.isArray(data)) { + if (data.length !== shape[0]) { + throw new Error( + `Shape mismatch at dim ${shape.length}: expected ${shape[0]}, got ${data.length}` + ); + } + for (let i = 0; i < data.length; i++) { + _assert_shape(data[i], shape.slice(1)); + } + } else if (ArrayBuffer.isView(data)) { + if (shape.length !== 1) { + throw new Error(`Shape mismatch at dim ${shape.length}: expected 1D, got ${shape}`); + } + if (data.length !== shape[0]) { + throw new Error( + `Shape mismatch at dim ${shape.length}: expected ${shape[0]}, got ${data.length}` + ); + } + } else { + if (shape.length !== 0) { + throw new Error(`Shape mismatch at dim ${shape.length}: expected scalar, got ${data}`); + } + } +} + +function _get_and_assert_shape(data: NestedNumberArray): number[] { + const shape = _get_shape(data); + _assert_shape(data, shape); + return shape; +} + +function 
_flatten(data: NestedNumberArray): number[] { + if (Array.isArray(data)) { + return data.flatMap(item => _flatten(item)); + } else if (ArrayBuffer.isView(data)) { + return Array.from(data); + } else { + return [data]; + } +} + +/** + * A shared backing store for tensor data. + * Multiple tensors (views) may reference the same TensorStorage instance. + * Mutating `data` on the TensorStorage is visible to all sharing tensors. + */ +export class TensorStorage { + constructor(public data: number[]) {} +} + +export class Tensor { + // Auto-generated ID + public id: number = getNextId(); + + // Optional user-defined name + public name: string | null = null; + + // Shared backing storage and offset into it. + // Views share the same TensorStorage but differ in _offset and shape. + private _storage: TensorStorage = new TensorStorage([]); + private _offset: number = 0; + + /** + * Returns the flat, contiguous data for this tensor. + * + * Fast path (non-view): returns the storage array directly — no allocation. + * View path: materialises a contiguous slice — one allocation per call, + * so callers inside tight loops should cache the result: `const d = t.data`. + */ + get data(): number[] { + const n = this.dataLength(); + if (this._offset === 0 && this._storage.data.length === n) { + return this._storage.data; + } + return this._storage.data.slice(this._offset, this._offset + n); + } + + /** + * Sets the tensor's data. + * + * Non-view (offset=0, storage covers exactly this tensor's numel): + * replaces the shared storage's data array in-place — all other views + * sharing the same TensorStorage immediately see the new values. + * + * View (offset≠0 or storage is larger than this tensor): + * copies `values` element-by-element into the shared storage at the + * correct offset — the original tensor and sibling views are updated. 
+ */ + set data(values: number[]) { + const n = values.length; + if (this._offset === 0 && this._storage.data.length === n) { + // Full-storage owner: swap out the backing array. + this._storage.data = values; + } else { + // View: write into shared storage at the right offset. + for (let i = 0; i < n; i++) { + this._storage.data[this._offset + i] = values[i]; + } + } + } + + public shape: number[]; + public grad_fn: TorchFunction | null = null; + public grad: Tensor | null = null; + + public requires_grad: boolean; + + constructor( + data: NestedNumberArray, + options: { requires_grad?: boolean; name?: string } = {}, + internal_options: { + operation?: TorchFunction; + shape?: number[]; + /** For internal view construction only — share an existing storage. */ + _storage?: TensorStorage; + /** Byte offset into _storage (in elements). */ + _offset?: number; + } = {} + ) { + if (internal_options._storage !== undefined) { + // View construction: share the provided storage. + this._storage = internal_options._storage; + this._offset = internal_options._offset ?? 0; + this.shape = internal_options.shape ?? []; + } else { + this._storage = new TensorStorage(_flatten(data)); + this._offset = 0; + this.shape = internal_options.shape ?? _get_and_assert_shape(data); + } + + this.requires_grad = options.requires_grad ?? false; + + if (options.name) { + this.name = options.name; + } + + this.grad_fn = internal_options.operation ?? 
null; + + if (this.requires_grad && !this.grad_fn) { + const acc = new AccumulateGrad(); + acc.variable = this; + this.grad_fn = acc; + } + } + + size(dim?: number): number | number[] { + if (dim !== undefined) { + if (dim < 0) { + dim += this.shape.length; + } + if (dim < 0 || dim >= this.shape.length) { + throw new Error( + `Dimension out of range (expected to be in range of [${-this.shape.length}, ${this.shape.length - 1}], but got ${dim})` + ); + } + return this.shape[dim]; + } + return this.shape; + } + + toArray_(): void { + return; + } + + toFlatArray(): number[] { + return this.data; + } + + toArray(): NestedNumberArray { + if (this.shape.length === 0) { + return this.data[0]; + } + + let flatIndex = 0; + const flatData = this.data; + + const buildDimension = (currentDim: number): NestedNumberArray => { + const size = this.shape[currentDim]; + const result = new Array(size); + const isLastDimension = currentDim === this.shape.length - 1; + + for (let i = 0; i < size; i++) { + if (isLastDimension) { + result[i] = flatData[flatIndex++]; + } else { + result[i] = buildDimension(currentDim + 1); + } + } + + return result; + }; + + return buildDimension(0); + } + + toString(): string { + let extra = ''; + if (this.name) { + extra += `, name="${this.name}"`; + } + if (this.dataLength() == 0 && this.shape.length > 0) { + extra += `, size=(${this.shape.join(', ')})`; + } + if (this.requires_grad) { + extra += ', requires_grad=True'; + } + + function formatNum(val: number): string { + return String(Math.round(val * 1e4) / 1e4); + } + + function formatArray(val: unknown): string { + if (Array.isArray(val)) { + return "[" + val.map(formatArray).join(", ") + "]"; + } + if (typeof val === "number") { + return formatNum(val); + } + return String(val); + } + + return `tensor(${formatArray(this.toArray())}${extra})`; + } + + dataLength(): number { + if (this.shape.length === 0) return 1; + return this.shape.reduce((a, b) => a * b, 1); + } + + private _executeUnaryOp(opName: 
string): Tensor { + const operation = resultRequiresGrad(this) + ? createOperation(opName) + : getOperationCache(opName); + return operation.forward(this); + } + + private _executeBinaryOp(opName: string, other: Tensor | number): Tensor { + if (typeof other == 'number') { + other = new Tensor(other); + } + const operation = resultRequiresGrad(this, other) + ? createOperation(opName) + : getOperationCache(opName); + return operation.forward(this, other); + } + + private _executeOpRaw(opName: string, ...args: any[]): Tensor { + const operation = createOperation(opName); + return operation.forward(this, ...args); + } + + item(): number { + if (this.dataLength() !== 1) { + throw new Error('Tensor.item() is only valid for scalars'); + } + return this.data[0]; + } + + detach(): Tensor { + return new Tensor(this.data, { requires_grad: false }, { shape: this.shape }); + } + + detach_(): void { + this.requires_grad = false; + this.grad = null; + this.grad_fn = null; + } + + zero_(): void { + this.data = Array(this.dataLength()).fill(0); + } + + private is_retain_grad: boolean = false; + retain_grad(): void { + // leaf node -> no-op + if (this.grad_fn instanceof AccumulateGrad) return; + if (this.is_retain_grad) return; + this.is_retain_grad = true; + + this.grad_fn._retained_tensors.push(this); + } + + backward(grad?: Tensor | null): void { + if (!this.requires_grad) { + return; + } + + if (!grad) { + if (this.dataLength() !== 1) { + throw new Error('Gradient is required for non-scalar tensors'); + } + grad = new Tensor(1); + } else { + grad.toArray_(); + } + + if (this.grad_fn) { + eventBus.dispatchEvent( + new CustomEvent(events.TENSOR_BEFORE_BACKWARD, { detail: { tensor: this } }) + ); + this.grad_fn.backward(grad); + eventBus.dispatchEvent( + new CustomEvent(events.TENSOR_AFTER_BACKWARD, { detail: { tensor: this } }) + ); + } + } + + /** + * Returns a view of this tensor along dimension 0. 
+ * + * The returned tensor shares the same underlying TensorStorage — mutations + * to either tensor (via zero_(), the data setter, or the optimizer) are + * immediately visible in the other. + * + * Supports negative indices (e.g. index(-1) is the last row). + * + * Note: the view does not carry a grad_fn; autograd does not propagate + * through index() at this time. + */ + index(i: number): Tensor { + if (this.shape.length === 0) { + throw new Error('Cannot index a scalar tensor'); + } + if (i < 0) { + i += this.shape[0]; + } + if (i < 0 || i >= this.shape[0]) { + throw new Error( + `Index ${i} out of bounds for dimension 0 with size ${this.shape[0]}` + ); + } + const newShape = this.shape.slice(1); + // Number of elements per row along dim 0. + const rowSize = newShape.length === 0 ? 1 : newShape.reduce((a, b) => a * b, 1); + const newOffset = this._offset + i * rowSize; + return new Tensor([], {}, { shape: newShape, _storage: this._storage, _offset: newOffset }); + } + + // operations + + // binary pointwise + + add(other: Tensor | number): Tensor { + return this._executeBinaryOp('add', other); + } + + sub(other: Tensor | number): Tensor { + return this._executeBinaryOp('sub', other); + } + + mul(other: Tensor | number): Tensor { + return this._executeBinaryOp('mul', other); + } + + div(other: Tensor | number): Tensor { + return this._executeBinaryOp('div', other); + } + + pow(other: Tensor | number): Tensor { + if (typeof other == 'number' && other % 1 === 0) { + return this._executeOpRaw('powint', other); + } + return this._executeBinaryOp('pow', other); + } + + fmod(other: Tensor | number): Tensor { + return this._executeBinaryOp('fmod', other); + } + + maximum(other: Tensor | number): Tensor { + return this._executeBinaryOp('maximum', other); + } + + minimum(other: Tensor | number): Tensor { + return this._executeBinaryOp('minimum', other); + } + + // unary pointwise + + log(): Tensor { + return this._executeUnaryOp('log'); + } + + sqrt(): Tensor { + 
    return this._executeUnaryOp('sqrt');
  }

  /** Element-wise exponential. */
  exp(): Tensor {
    return this._executeUnaryOp('exp');
  }

  /** Element-wise square. */
  square(): Tensor {
    return this._executeUnaryOp('square');
  }

  /** Element-wise absolute value. */
  abs(): Tensor {
    return this._executeUnaryOp('abs');
  }

  /** Element-wise sign (-1, 0, 1). */
  sign(): Tensor {
    return this._executeUnaryOp('sign');
  }

  /** Element-wise negation. */
  neg(): Tensor {
    return this._executeUnaryOp('neg');
  }

  /** Element-wise 1/x. */
  reciprocal(): Tensor {
    return this._executeUnaryOp('reciprocal');
  }

  /** Replaces NaN/Inf values with finite numbers (see the op for defaults). */
  nan_to_num(): Tensor {
    return this._executeUnaryOp('nan_to_num');
  }

  /** Returns a tensor with the same data and the given shape. */
  reshape(shape: number[]): Tensor {
    return this._executeOpRaw('reshape', shape);
  }

  /** Flattens dims [start_dim, end_dim] into one; defaults flatten fully. */
  flatten(start_dim: number = 0, end_dim: number = -1): Tensor {
    return this._executeOpRaw('flatten', start_dim, end_dim);
  }

  /** Removes the size-1 dimension `dim`. */
  squeeze(dim: number): Tensor {
    return this._executeOpRaw('squeeze', dim);
  }

  /** Inserts a size-1 dimension at `dim`. */
  unsqueeze(dim: number): Tensor {
    return this._executeOpRaw('unsqueeze', dim);
  }

  /** Broadcasts size-1 dimensions up to `sizes`. */
  expand(sizes: number[]): Tensor {
    return this._executeOpRaw('expand', sizes);
  }

  // trigonometric

  sin(): Tensor {
    return this._executeUnaryOp('sin');
  }

  cos(): Tensor {
    return this._executeUnaryOp('cos');
  }

  tan(): Tensor {
    return this._executeUnaryOp('tan');
  }

  // reduction
  // Each reduces over `dim` (all dims when omitted); keepdim preserves rank.

  sum(dim?: number | number[], keepdim: boolean = false): Tensor {
    return this._executeOpRaw('sum', dim, keepdim);
  }

  mean(dim?: number | number[], keepdim: boolean = false): Tensor {
    return this._executeOpRaw('mean', dim, keepdim);
  }

  max(dim?: number | number[], keepdim: boolean = false): Tensor {
    return this._executeOpRaw('max', dim, keepdim);
  }

  min(dim?: number | number[], keepdim: boolean = false): Tensor {
    return this._executeOpRaw('min', dim, keepdim);
  }

  // linalg

  /** Swaps dimensions dim0 and dim1. */
  transpose(dim0: number, dim1: number): Tensor {
    return this._executeOpRaw('transpose', dim0, dim1);
  }

  /** Matrix product (supports batched and 1-D dot-product cases via the op). */
  matmul(other: Tensor): Tensor {
    return this._executeBinaryOp('matmul', other);
  }

  // comparison
  // Each returns an element-wise 0/1 mask tensor.

  lt(other: Tensor | number): Tensor {
    return
this._executeBinaryOp('lt', other); + } + + gt(other: Tensor | number): Tensor { + return this._executeBinaryOp('gt', other); + } + + le(other: Tensor | number): Tensor { + return this._executeBinaryOp('le', other); + } + + ge(other: Tensor | number): Tensor { + return this._executeBinaryOp('ge', other); + } + + eq(other: Tensor | number): Tensor { + return this._executeBinaryOp('eq', other); + } + + ne(other: Tensor | number): Tensor { + return this._executeBinaryOp('ne', other); + } + + allclose( + other: Tensor, + rtol: number = 1e-5, + atol: number = 1e-8, + equal_nan: boolean = false + ): boolean { + const thisData = this.data; + const otherData = other.data; + if (thisData.length !== otherData.length) return false; + for (let i = 0; i < thisData.length; i++) { + const av = thisData[i], + bv = otherData[i]; + if (equal_nan && isNaN(av) && isNaN(bv)) continue; + if (isNaN(av) || isNaN(bv)) return false; + if (Math.abs(av - bv) > atol + rtol * Math.abs(bv)) return false; + } + return true; + } + + numel(): number { + return this.dataLength(); + } + + // other + + sigmoid(): Tensor { + return this._executeUnaryOp('sigmoid'); + } + + tanh(): Tensor { + return this._executeUnaryOp('tanh'); + } + + relu(): Tensor { + return this._executeUnaryOp('relu'); + } + + softmax(dim: number): Tensor { + return this._executeOpRaw('softmax', dim); + } + + clamp(min: number, max: number): Tensor { + return this._executeOpRaw('clamp', min, max); + } + + cat(tensors: Tensor | Tensor[], dim: number = 0): Tensor { + const others = Array.isArray(tensors) ? 
tensors : [tensors]; + return createOperation('cat').forward([this, ...others], dim); + } + + concatenate(tensors: Tensor | Tensor[], dim: number = 0): Tensor { + return this.cat(tensors, dim); + } + + concat(tensors: Tensor | Tensor[], dim: number = 0): Tensor { + return this.cat(tensors, dim); + } +} + +// --------------------------------------------------------------------------- +// Typed tensor constructors +// --------------------------------------------------------------------------- + +function _truncate_nested(data: NestedNumberArray): NestedNumberArray { + if (typeof data === 'number') return Math.trunc(data); + if (Array.isArray(data)) return (data as NestedNumberArray[]).map(_truncate_nested); + // TypedArray + const out = new Float64Array((data as Float64Array).length); + for (let i = 0; i < out.length; i++) out[i] = Math.trunc((data as Float64Array)[i]); + return out; +} + +/** + * A Tensor that stores 32-bit float values (same as the default Tensor). + * Provided for PyTorch API compatibility. + */ +export class FloatTensor extends Tensor { + constructor(data: NestedNumberArray, options: { requires_grad?: boolean } = {}) { + super(data, options); + } +} + +/** + * A Tensor whose values are truncated to integers (64-bit integer semantics). + * Negative numbers are truncated toward zero: LongTensor([-1.7]) -> tensor([-1]). 
 */
export class LongTensor extends Tensor {
  constructor(data: NestedNumberArray, options: { requires_grad?: boolean } = {}) {
    super(_truncate_nested(data), options);
  }
}
diff --git a/src/util.ts b/src/util.ts
new file mode 100644
index 00000000..4d350fdc
--- /dev/null
+++ b/src/util.ts
@@ -0,0 +1,29 @@
// Monotonically increasing id source shared by the whole module.
let globalId = 0;

// Returns the next unique id (0, 1, 2, ...).
export const getNextId = () => {
  return globalId++;
};

// Global event bus for autograd/operation lifecycle notifications.
export const eventBus = new EventTarget();
// Event names dispatched on eventBus around forward/backward/accumulate.
export const events = {
  TENSOR_BEFORE_BACKWARD: 'tensor.beforeBackward',
  TENSOR_AFTER_BACKWARD: 'tensor.afterBackward',
  OPERATION_BEFORE_FORWARD: 'operation.beforeForward',
  OPERATION_AFTER_FORWARD: 'operation.afterForward',
  OPERATION_BEFORE_BACKWARD: 'operation.beforeBackward',
  OPERATION_AFTER_BACKWARD: 'operation.afterBackward',
  OPERATION_BEFORE_ACCUMULATE_GRAD: 'operation.beforeAccumulateGrad',
  OPERATION_AFTER_ACCUMULATE_GRAD: 'operation.afterAccumulateGrad'
};

// Number of elements implied by a shape (1 for the empty/scalar shape,
// because reduce over [] returns the initial value).
export function _numel(shape: number[]): number {
  return shape.reduce((a, b) => a * b, 1);
}

// Normalizes variadic size arguments: accepts either ones(2, 3) style
// (args = [2, 3]) or ones([2, 3]) style (args = [[2, 3]]).
export function _get_shape_from_args(args: number[] | number[][]): number[] {
  if (Array.isArray(args[0])) {
    return args[0];
  }

  return args as number[];
}
diff --git a/test/backward.test.js b/test/backward.test.js
new file mode 100644
index 00000000..d66171b3
--- /dev/null
+++ b/test/backward.test.js
@@ -0,0 +1,88 @@
import { assert } from 'chai';
import { Tensor } from 'torch';

describe('Autograd', () => {
  it('y = x**2, dy/dx at x=2 is 4', () => {
    const x = new Tensor([2.0], { requires_grad: true });
    const y = x.pow(new Tensor([2.0]));
    assert.strictEqual(y.item(), 4.0);
    y.backward();
    assert.strictEqual(x.grad?.item(), 4.0);
  });

  it('y = 2*x, dy/dx at x=2 is 2', () => {
    const x = new Tensor([2.0], { requires_grad: true });
    const y = x.mul(new Tensor(2.0));
    assert.strictEqual(y.item(), 4.0);
    y.backward();
    assert.strictEqual(x.grad?.item(), 2.0);
  });

  it('z = x + y, dz/dx at x=2, y=3 is 1', () => {
    const x = new
Tensor([2.0], { requires_grad: true }); + const y = new Tensor([3.0], { requires_grad: true }); + const z = x.add(y); + assert.strictEqual(z.item(), 5.0); + z.backward(); + assert.strictEqual(x.grad?.item(), 1.0); + assert.strictEqual(y.grad?.item(), 1.0); + }); + + it('y = x**2 + 2*x (seperated operations), dy/dx at x=2 is 6', () => { + const x = new Tensor([2.0], { requires_grad: true }); + + const y1 = x.pow(new Tensor(2.0)); + const y2 = x.mul(new Tensor(2.0)); + + const y = y1.add(y2); + + assert.strictEqual(y.item(), 8.0); + assert.strictEqual(y1.item(), 4.0); + assert.strictEqual(y2.item(), 4.0); + + y.backward(); + + assert.strictEqual(x.grad?.item(), 6.0); + assert.strictEqual(x.item(), 2.0); + }); + + it('y = x**2 + 2*x (combined operations), dy/dx at x=2 is 6', () => { + const x = new Tensor([2.0], { requires_grad: true }); + const y = x.pow(new Tensor(2.0)).add(x.mul(new Tensor(2.0))); + assert.strictEqual(y.item(), 8.0); + y.backward(); + assert.strictEqual(x.grad?.item(), 6.0); + }); + + it('Intermediate tensors do not store grad by default', () => { + const x = new Tensor([2.0], { requires_grad: true }); + + const y1 = x.pow(new Tensor(2.0)); + const y2 = x.mul(new Tensor(2.0)); + + const y = y1.add(y2); + + y.backward(); + + // Intermediate gradients are not retained by default + assert.strictEqual(y1.grad, null); + assert.strictEqual(y2.grad, null); + }); + + it('Intermediate tensors store grads after retain_grad', () => { + const x = new Tensor([2.0], { requires_grad: true }); + + const y1 = x.pow(new Tensor(2.0)); + const y2 = x.mul(new Tensor(2.0)); + + const y = y1.add(y2); + + y1.retain_grad(); + y2.retain_grad(); + + y.backward(); + + assert.strictEqual(y1.grad.item(), 1.0); + assert.strictEqual(y2.grad.item(), 1.0); + }); +}); diff --git a/test/broadcast.test.js b/test/broadcast.test.js new file mode 100644 index 00000000..524a2a81 --- /dev/null +++ b/test/broadcast.test.js @@ -0,0 +1,32 @@ +import { assert } from 'chai'; +import { Tensor, 
add, __left_index__, __right_index__ } from 'torch'; + +describe('Broadcast Index', () => { + it('Broadcast indices should be correct in 2d array', () => { + const arr = new Tensor([ + [0, 0], + [0, 0], + [0, 0] + ]); + + assert.deepStrictEqual(__left_index__(arr, arr).toFlatArray(), [0, 1, 2, 3, 4, 5]); + assert.deepStrictEqual(__right_index__(arr, arr).toFlatArray(), [0, 1, 2, 3, 4, 5]); + }); + + it('Should get correct value of left and right index', () => { + const arr = new Tensor([ + [1, 2], + [3, 4], + [5, 6] + ]); + + const zeros = new Tensor([ + [0, 0], + [0, 0], + [0, 0] + ]) + + assert.deepStrictEqual(add(arr, zeros).toFlatArray(), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(add(zeros, arr).toFlatArray(), [1, 2, 3, 4, 5, 6]); + }); +}); diff --git a/test/broadcast.test.ts b/test/broadcast.test.ts new file mode 100644 index 00000000..0dae3a00 --- /dev/null +++ b/test/broadcast.test.ts @@ -0,0 +1,22 @@ +import { assert } from 'chai'; +import { _broadcast_shape } from '../src/broadcasting'; + +describe('Broadcasting', () => { + it('Broadcast shape of same dimensions', () => { + assert.deepStrictEqual(_broadcast_shape([1, 3, 1], [4, 3, 6]), [4, 3, 6]); + assert.deepStrictEqual(_broadcast_shape([4, 3, 6], [1, 3, 1]), [4, 3, 6]); + assert.deepStrictEqual(_broadcast_shape([2, 2, 2], [2, 2, 2]), [2, 2, 2]); + + assert.throws(() => _broadcast_shape([1, 2, 1], [1, 3, 1]), Error); + }); + + it('Broadcast shape of different dimensions', () => { + assert.deepStrictEqual(_broadcast_shape([2, 3, 1], [3, 1]), [2, 3, 1]); + assert.deepStrictEqual(_broadcast_shape([3, 1], [2, 3, 1]), [2, 3, 1]); + assert.deepStrictEqual(_broadcast_shape([1], [2, 2, 2]), [2, 2, 2]); + assert.deepStrictEqual(_broadcast_shape([2], []), [2]); + assert.deepStrictEqual(_broadcast_shape([1, 4], [3, 3, 1]), [3, 3, 4]); + + assert.throws(() => _broadcast_shape([1, 2], [3, 3, 6]), Error); + }); +}); diff --git a/test/chai-esm.js b/test/chai-esm.js new file mode 100644 index 00000000..8104557e --- 
/dev/null +++ b/test/chai-esm.js @@ -0,0 +1,4 @@ +// Wrap global UMD chai as an ES module export +const { assert, expect, should, use, config } = window.chai; +export { assert, expect, should, use, config }; +export default window.chai; diff --git a/test/creation.test.ts b/test/creation.test.ts new file mode 100644 index 00000000..3641f9f1 --- /dev/null +++ b/test/creation.test.ts @@ -0,0 +1,382 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; +import { Tensor } from 'torch'; + +describe('Creation Functions', () => { + describe('tensor', () => { + it('should create a scalar tensor', () => { + const t = torch.tensor(5); + assert.deepStrictEqual(t.toArray(), 5); + assert.deepStrictEqual(t.shape, []); + }); + + it('should create a tensor with 1D array', () => { + const t = torch.tensor([1, 2, 3]); + assert.deepStrictEqual(t.toArray(), [1, 2, 3]); + assert.deepStrictEqual(t.shape, [3]); + }); + + it('should create a tensor with nested 2D array', () => { + const t = torch.tensor([ + [1, 2], + [3, 4] + ]); + assert.deepStrictEqual(t.toArray(), [ + [1, 2], + [3, 4] + ]); + assert.deepStrictEqual(t.shape, [2, 2]); + }); + + it('should create a tensor with nested 3D array', () => { + const t = torch.tensor([ + [ + [1, 2], + [3, 4] + ], + [ + [5, 6], + [7, 8] + ] + ]); + assert.deepStrictEqual(t.toArray(), [ + [ + [1, 2], + [3, 4] + ], + [ + [5, 6], + [7, 8] + ] + ]); + assert.deepStrictEqual(t.shape, [2, 2, 2]); + }); + + it('should create empty tensors', () => { + const t = torch.tensor([]); + assert.deepStrictEqual(t.toArray(), []); + assert.deepStrictEqual(t.shape, [0]); + + const t2 = torch.tensor([[]]); + assert.deepStrictEqual(t2.toArray(), [[]]); + assert.deepStrictEqual(t2.shape, [1, 0]); + + const t3 = torch.tensor([[], []]); + assert.deepStrictEqual(t3.toArray(), [[], []]); + assert.deepStrictEqual(t3.shape, [2, 0]); + }); + + it('should not create tensor with inconsistent shape', () => { + assert.throws(() => torch.tensor([[1, 2], [3]])); + 
assert.throws(() => torch.tensor([1, [2, 3]])); + assert.throws(() => + torch.tensor([ + [1, 2], + [3, 4, 5] + ]) + ); + assert.throws(() => + torch.tensor([ + [ + [1, 2], + [3, 4] + ], + [[5, 6]] + ]) + ); + }); + }); + describe('ones', () => { + it('should create a 1D tensor of ones', () => { + const t = torch.ones(5); + assert.deepStrictEqual(t.toArray(), [1, 1, 1, 1, 1]); + assert.deepStrictEqual(t.shape, [5]); + }); + + it('should create a 2D tensor of ones', () => { + const t = torch.ones(2, 3); + assert.deepStrictEqual(t.toArray(), [ + [1, 1, 1], + [1, 1, 1] + ]); + assert.deepStrictEqual(t.shape, [2, 3]); + }); + + it('should create a 3D tensor of ones', () => { + const t = torch.ones(2, 3, 4); + assert.deepStrictEqual(t.shape, [2, 3, 4]); + assert.strictEqual(t.toFlatArray().length, 24); + assert.isTrue(t.toFlatArray().every((v: number) => v === 1)); + }); + + it('should accept shape as an array', () => { + const t = torch.ones([3, 2]); + assert.deepStrictEqual(t.shape, [3, 2]); + assert.isTrue(t.toFlatArray().every((v: number) => v === 1)); + }); + }); + + describe('zeros', () => { + it('should create a 1D tensor of zeros', () => { + const t = torch.zeros(4); + assert.deepStrictEqual(t.toArray(), [0, 0, 0, 0]); + assert.deepStrictEqual(t.shape, [4]); + }); + + it('should create a 2D tensor of zeros', () => { + const t = torch.zeros(3, 2); + assert.deepStrictEqual(t.toArray(), [ + [0, 0], + [0, 0], + [0, 0] + ]); + assert.deepStrictEqual(t.shape, [3, 2]); + }); + + it('should accept shape as an array', () => { + const t = torch.zeros([2, 4]); + assert.deepStrictEqual(t.shape, [2, 4]); + assert.isTrue(t.toFlatArray().every((v: number) => v === 0)); + }); + }); + + describe('ones_like', () => { + it('should create a tensor of ones with the same shape', () => { + const original = new Tensor([ + [1, 2, 3], + [4, 5, 6] + ]); + const t = torch.ones_like(original); + assert.deepStrictEqual(t.shape, [2, 3]); + assert.isTrue(t.toFlatArray().every((v: number) => v 
=== 1)); + }); + }); + + describe('zeros_like', () => { + it('should create a tensor of zeros with the same shape', () => { + const original = new Tensor([ + [1, 2], + [3, 4], + [5, 6] + ]); + const t = torch.zeros_like(original); + assert.deepStrictEqual(t.shape, [3, 2]); + assert.isTrue(t.toFlatArray().every((v: number) => v === 0)); + }); + }); + + describe('rand', () => { + it('should create a tensor with the correct shape', () => { + const t = torch.rand(3, 4); + assert.deepStrictEqual(t.shape, [3, 4]); + assert.strictEqual(t.toFlatArray().length, 12); + }); + + it('should create values in [0, 1)', () => { + const t = torch.rand(10); + for (const v of t.toFlatArray()) { + assert.isAtLeast(v, 0); + assert.isBelow(v, 1); + } + }); + }); + + describe('randn', () => { + it('should create a tensor with the correct shape', () => { + const t = torch.randn(2, 5); + assert.deepStrictEqual(t.shape, [2, 5]); + assert.strictEqual(t.toFlatArray().length, 10); + }); + }); + + describe('randint', () => { + it('should create a tensor with the correct shape', () => { + const t = torch.randint(0, 10, [3, 3]); + assert.deepStrictEqual(t.shape, [3, 3]); + assert.strictEqual(t.toFlatArray().length, 9); + }); + + it('should create integer values in [low, high)', () => { + const t = torch.randint(5, 15, [20]); + for (const v of t.toFlatArray()) { + assert.isAtLeast(v, 5); + assert.isBelow(v, 15); + assert.strictEqual(v, Math.floor(v)); + } + }); + }); + + describe('linspace', () => { + it('should create evenly spaced values', () => { + const t = torch.linspace(0, 1, 5); + const expected = [0, 0.25, 0.5, 0.75, 1.0]; + assert.deepStrictEqual(t.shape, [5]); + const arr = t.toFlatArray(); + for (let i = 0; i < expected.length; i++) { + assert.closeTo(arr[i], expected[i], 1e-6); + } + }); + + it('should handle negative range', () => { + const t = torch.linspace(-1, 1, 3); + const arr = t.toFlatArray(); + assert.closeTo(arr[0], -1, 1e-6); + assert.closeTo(arr[1], 0, 1e-6); + 
assert.closeTo(arr[2], 1, 1e-6);
    });
  });

  describe('arange', () => {
    it('should create a range of values', () => {
      const t = torch.arange(0, 5);
      assert.deepStrictEqual(t.toArray(), [0, 1, 2, 3, 4]);
    });

    it('should support custom step', () => {
      const t = torch.arange(0, 10, 2);
      assert.deepStrictEqual(t.toArray(), [0, 2, 4, 6, 8]);
    });

    // Fixed: the description previously claimed "negative step values", but
    // the test exercises a positive fractional step (0.5).
    it('should support fractional step values', () => {
      const t = torch.arange(1, 5, 0.5);
      assert.deepStrictEqual(t.shape, [8]);
      const arr = t.toFlatArray();
      assert.closeTo(arr[0], 1.0, 1e-6);
      assert.closeTo(arr[7], 4.5, 1e-6);
    });
  });

  describe('empty', () => {
    it('should create a tensor with the correct shape', () => {
      const t = torch.empty(3, 4);
      assert.deepStrictEqual(t.shape, [3, 4]);
      assert.strictEqual(t.toFlatArray().length, 12);
    });

    it('should accept shape as an array', () => {
      const t = torch.empty([2, 3]);
      assert.deepStrictEqual(t.shape, [2, 3]);
    });
  });

  describe('empty_like', () => {
    it('should create a tensor with the same shape', () => {
      const original = new Tensor([[1, 2, 3], [4, 5, 6]]);
      const t = torch.empty_like(original);
      assert.deepStrictEqual(t.shape, [2, 3]);
    });
  });

  describe('full', () => {
    it('should create a tensor filled with a value', () => {
      const t = torch.full([2, 3], 7);
      assert.deepStrictEqual(t.shape, [2, 3]);
      assert.isTrue(t.toFlatArray().every((v: number) => v === 7));
    });

    it('should create a 1D tensor filled with a value', () => {
      const t = torch.full([5], 3.14);
      assert.deepStrictEqual(t.shape, [5]);
      for (const v of t.toFlatArray()) {
        assert.closeTo(v, 3.14, 1e-6);
      }
    });
  });

  describe('full_like', () => {
    it('should create a tensor filled with a value with the same shape', () => {
      const original = new Tensor([[1, 2], [3, 4]]);
      const t = torch.full_like(original, 42);
      assert.deepStrictEqual(t.shape, [2, 2]);
      assert.isTrue(t.toFlatArray().every((v: number) => v
=== 42)); + }); + }); + + describe('rand_like', () => { + it('should create a tensor with the same shape and values in [0, 1)', () => { + const original = new Tensor([[1, 2, 3], [4, 5, 6]]); + const t = torch.rand_like(original); + assert.deepStrictEqual(t.shape, [2, 3]); + for (const v of t.toFlatArray()) { + assert.isAtLeast(v, 0); + assert.isBelow(v, 1); + } + }); + }); + + describe('randn_like', () => { + it('should create a tensor with the same shape', () => { + const original = new Tensor([[1, 2], [3, 4]]); + const t = torch.randn_like(original); + assert.deepStrictEqual(t.shape, [2, 2]); + assert.strictEqual(t.toFlatArray().length, 4); + }); + }); + + describe('randint_like', () => { + it('should create a tensor with the same shape and integer values in [low, high)', () => { + const original = new Tensor([[1, 2, 3], [4, 5, 6]]); + const t = torch.randint_like(original, 0, 10); + assert.deepStrictEqual(t.shape, [2, 3]); + for (const v of t.toFlatArray()) { + assert.isAtLeast(v, 0); + assert.isBelow(v, 10); + assert.strictEqual(v, Math.floor(v)); + } + }); + }); + + describe('is_tensor', () => { + it('should return true for tensors', () => { + const t = torch.tensor([1, 2, 3]); + assert.isTrue(torch.is_tensor(t)); + }); + + it('should return false for non-tensors', () => { + assert.isFalse(torch.is_tensor(5)); + assert.isFalse(torch.is_tensor([1, 2, 3])); + assert.isFalse(torch.is_tensor('hello')); + assert.isFalse(torch.is_tensor(null)); + }); + }); + + describe('is_nonzero', () => { + it('should return true for non-zero scalar tensor', () => { + assert.isTrue(torch.is_nonzero(torch.tensor(5))); + assert.isTrue(torch.is_nonzero(torch.tensor(-1))); + assert.isTrue(torch.is_nonzero(torch.tensor(0.001))); + }); + + it('should return false for zero scalar tensor', () => { + assert.isFalse(torch.is_nonzero(torch.tensor(0))); + }); + + it('should throw for multi-element tensor', () => { + assert.throws(() => torch.is_nonzero(torch.tensor([1, 2]))); + }); + }); + + 
describe('numel', () => { + it('should return the total number of elements', () => { + assert.strictEqual(torch.numel(torch.tensor([1, 2, 3])), 3); + assert.strictEqual(torch.numel(torch.tensor([[1, 2], [3, 4]])), 4); + assert.strictEqual(torch.numel(torch.tensor(5)), 1); + }); + }); + + describe('seed', () => { + it('random numbers should be different', () => { + const t1 = torch.rand(5); + const t2 = torch.rand(5); + assert.notDeepEqual(t1.toArray(), t2.toArray()); + }); + + it('manual_seed should seed the random number generator', () => { + torch.manual_seed(123); + const t1 = torch.rand(5); + torch.manual_seed(123); + const t2 = torch.rand(5); + assert.deepStrictEqual(t1.toArray(), t2.toArray()); + }); + }); +}); diff --git a/test/custom_operations.test.js b/test/custom_operations.test.js new file mode 100644 index 00000000..12773947 --- /dev/null +++ b/test/custom_operations.test.js @@ -0,0 +1,118 @@ +import { assert } from 'chai'; +import { Tensor } from 'torch'; + +describe('Custom Operations', () => { + + describe('Matmul', () => { + it('should perform matrix multiplication on 2D tensors (1)', () => { + const t1 = new Tensor([ + [1, 2, 3], + [4, 5, 6] + ]); + const t2 = new Tensor([ + [7, 8], + [9, 1], + [2, 3] + ]); + + const result = t1.matmul(t2); + assert.deepStrictEqual(result.shape, [2, 2]); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [31, 19, 85, 55]); + }); + + it('should perform matrix multiplication on 2D tensors (2)', () => { + const t1 = new Tensor([ + [1, 2, 3], + [4, 5, 6] + ]); + + const t2 = new Tensor([ + [9, 9, 1], + [6, 4, 3], + [5, 5, 6] + ]); + + const result = t1.matmul(t2); + + const expected = [36, 32, 25, 96, 86, 55]; + + assert.deepStrictEqual(Array.from(result.toFlatArray()), expected); + assert.deepStrictEqual(result.shape, [2, 3]); + }); + + it('should perform dot product on 1D tensors', () => { + const t1 = new Tensor([1, 2, 3]); + const t2 = new Tensor([4, 5, 6]); + + const result = t1.matmul(t2); + 
assert.deepStrictEqual(result.shape, []); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [32]); + }); + + it('should handle batch matrix multiplication', () => { + const t1 = new Tensor([ + [[62, 50], [7, 53]], + [[5, 48], [63, 94]] + ]); + const t2 = new Tensor([ + [[98, 3], [59, 81]], + [[79, 74], [41, 98]] + ]); + + const result = t1.matmul(t2); + assert.deepStrictEqual(result.shape, [2, 2, 2]); + + const data = Array.from(result.toFlatArray()); + assert.deepStrictEqual(data, [9026, 4236, 3813, 4314, 2363, 5074, 8831, 13874]); + }); + + it('should calculate backward correctly for matmul', () => { + const t1 = new Tensor([ + [1, 2], + [3, 4] + ], { requires_grad: true }); + + const t2 = new Tensor([ + [5, 6], + [7, 8] + ], { requires_grad: true }); + + const result = t1.matmul(t2); + result.sum().backward(); + + const t1Grad = Array.from(t1.grad.toFlatArray()); + assert.deepStrictEqual(t1Grad, [11, 15, 11, 15]); + + const t2Grad = Array.from(t2.grad.toFlatArray()); + assert.deepStrictEqual(t2Grad, [4, 4, 6, 6]); + }); + }); + + describe('Transpose', () => { + it('should transpose a 2D tensor', () => { + const t = new Tensor([ + [1, 2, 3], + [4, 5, 6] + ]); + + const result = t.transpose(0, 1); + assert.deepStrictEqual(result.shape, [3, 2]); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 4, 2, 5, 3, 6]); + }); + + it('should transpose dimensions in a 3D tensor', () => { + const t = new Tensor([ + [[1, 2], [3, 4]], + [[5, 6], [7, 8]] + ]); + + const result = t.transpose(1, 2); + assert.deepStrictEqual(result.shape, [2, 2, 2]); + const data = Array.from(result.toFlatArray()); + assert.deepStrictEqual(data, [1, 3, 2, 4, 5, 7, 6, 8]); + }); + + + }); + +}); diff --git a/test/event_listener.test.js b/test/event_listener.test.js new file mode 100644 index 00000000..4a2aa2a1 --- /dev/null +++ b/test/event_listener.test.js @@ -0,0 +1,95 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; + +describe('Event Bus', () => { + const 
a = new torch.Tensor([1, 2, 3], { requires_grad: true }); + const b = new torch.Tensor([4, 5, 6], { requires_grad: true }); + const ab = a.add(b); + const sum = ab.sum(); + + describe('tensor.beforeBackward', () => { + it('should dispatch event before backward', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.TENSOR_BEFORE_BACKWARD, () => { + eventDispatched = true; + }); + sum.backward(); + assert.isTrue(eventDispatched); + }); + }); + + describe('tensor.afterBackward', () => { + it('should dispatch event after backward', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.TENSOR_AFTER_BACKWARD, () => { + eventDispatched = true; + }); + sum.backward(); + assert.isTrue(eventDispatched); + }); + }); + + describe('operation.beforeForward', () => { + it('should dispatch event before forward', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.OPERATION_BEFORE_FORWARD, () => { + eventDispatched = true; + }); + const c = a.add(b); + assert.isTrue(eventDispatched); + }); + }); + + describe('operation.afterForward', () => { + it('should dispatch event after forward', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.OPERATION_AFTER_FORWARD, () => { + eventDispatched = true; + }); + const c = a.add(b); + assert.isTrue(eventDispatched); + }); + }); + + describe('operation.beforeBackward', () => { + it('should dispatch event before backward', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.OPERATION_BEFORE_BACKWARD, () => { + eventDispatched = true; + }); + sum.backward(); + assert.isTrue(eventDispatched); + }); + }); + + describe('operation.afterBackward', () => { + it('should dispatch event after backward', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.OPERATION_AFTER_BACKWARD, () => { + eventDispatched = true; + }); + sum.backward(); + 
assert.isTrue(eventDispatched); + }); + }); + + describe('operation.accumulateGrad', () => { + it('should dispatch event before accumulateGrad', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.OPERATION_BEFORE_ACCUMULATE_GRAD, () => { + eventDispatched = true; + }); + sum.backward(); + assert.isTrue(eventDispatched); + }); + + it('should dispatch event after accumulateGrad', () => { + let eventDispatched = false; + torch.eventBus.addEventListener(torch.events.OPERATION_AFTER_ACCUMULATE_GRAD, () => { + eventDispatched = true; + }); + sum.backward(); + assert.isTrue(eventDispatched); + }); + }); +}); diff --git a/test/export.test.ts b/test/export.test.ts new file mode 100644 index 00000000..167aba49 --- /dev/null +++ b/test/export.test.ts @@ -0,0 +1,23 @@ +import { assert } from 'chai'; +import { _atenMap } from '../src/export'; +import { _getAllOperationNames } from '../src/functions/registry'; + +// Import torch to trigger operation registration side effects +import 'torch'; + +describe('Export', () => { + // List from https://docs.pytorch.org/docs/2.10/user_guide/torch_compiler/torch.compiler_ir.html + + const EXCLUDED_OPERATIONS = new Set([ + "__left_index__", + "__right_index__", + ]); + + it('has all supported operations', () => { + for (const opName of _getAllOperationNames()) { + assert.isTrue(opName in _atenMap || EXCLUDED_OPERATIONS.has(opName), + `Missing aten mapping for operation: ${opName}. 
` + + `Please add it in export.ts or exclude it in export.test.ts`); + } + }); +}); diff --git a/test/functional.test.js b/test/functional.test.js new file mode 100644 index 00000000..d2091942 --- /dev/null +++ b/test/functional.test.js @@ -0,0 +1,239 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; +import { Tensor } from 'torch'; + +describe('Functional', () => { + describe('Addition', () => { + it('should add two tensors with same shape', () => { + const t1 = new Tensor([10]); + const t2 = new Tensor([20]); + const result = torch.add(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [30]); + assert.deepStrictEqual(result.shape, [1]); + }); + + it('should add tensors with different shapes (broadcasting)', () => { + const t1 = new Tensor([10, 20, 30]); + const t2 = new Tensor([1]); + const result = torch.add(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [11, 21, 31]); + assert.deepStrictEqual(result.shape, [3]); + }); + + it('should add two 1D tensors of same length', () => { + const t1 = new Tensor([1, 2, 3]); + const t2 = new Tensor([4, 5, 6]); + const result = torch.add(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [5, 7, 9]); + assert.deepStrictEqual(result.shape, [3]); + }); + }); + + describe('Multiplication', () => { + it('should multiply two tensors with same shape', () => { + const t1 = new Tensor([10]); + const t2 = new Tensor([20]); + const result = torch.mul(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [200]); + assert.deepStrictEqual(result.shape, [1]); + }); + }); + + describe('should multiply tensors with different shapes (broadcasting)', () => { + it('should multiply two tensors with different shapes (broadcasting)', () => { + const t1 = new Tensor([10, 20, 30]); + const t2 = new Tensor([1]); + const result = torch.mul(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [10, 20, 30]); + 
assert.deepStrictEqual(result.shape, [3]); + }); + }); + + describe('Matrix Multiplication', () => { + it('should multiply two tensors with shape (1)', () => { + const t1 = new Tensor([10]); + const t2 = new Tensor([20]); + const result = torch.matmul(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [200]); + assert.deepStrictEqual(result.shape, []); + }); + + it('should multiply two tensors with 1 dim', () => { + const t1 = new Tensor([1, 2, 3, 4]); + const t2 = new Tensor([5, 6, 7, 8]); + const result = torch.matmul(t1, t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [70]); + assert.deepStrictEqual(result.shape, []); + }); + + it('should multiply two tensors with shape (2, 3) and (3, 3) with the correct values', () => { + const t1 = new Tensor([ + [1, 2, 3], + [4, 5, 6] + ]); + + const t2 = new Tensor([ + [9, 9, 1], + [6, 4, 3], + [5, 5, 6] + ]); + + const result = torch.matmul(t1, t2); + + const expected = [36, 32, 25, 96, 86, 55]; + + assert.deepStrictEqual(Array.from(result.toFlatArray()), expected); + assert.deepStrictEqual(result.shape, [2, 3]); + }); + + it('should output correct shape', () => { + function shape_of(shape1, shape2) { + const tensor1 = torch.randn(shape1); + const tensor2 = torch.randn(shape2); + const result = torch.matmul(tensor1, tensor2); + return result.shape; + } + + assert.deepStrictEqual(shape_of([3, 4], [4, 5]), [3, 5]); + assert.deepStrictEqual(shape_of([3, 4], [4]), [3]); + assert.deepStrictEqual(shape_of([10, 3, 4], [4]), [10, 3]); + assert.deepStrictEqual(shape_of([10, 3, 4], [10, 4, 5]), [10, 3, 5]); + assert.deepStrictEqual(shape_of([10, 3, 4], [4, 5]), [10, 3, 5]); + }); + }); + + describe('Transpose', () => { + it('should transpose a tensor', () => { + const t = new Tensor([ + [1, 2], + [3, 4] + ]); + const result = t.transpose(0, 1); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 3, 2, 4]); + assert.deepStrictEqual(result.shape, [2, 2]); + }); + }); + + describe('numel', 
() => { + it('should return the total number of elements', () => { + const t = new Tensor([[1, 2], [3, 4], [5, 6]]); + assert.strictEqual(torch.numel(t), 6); + }); + }); + + describe('randperm', () => { + it('should return a permutation of 0..n-1', () => { + torch.manual_seed(42); + const t = torch.randperm(5); + assert.strictEqual(t.numel(), 5); + const sorted = Array.from(t.toFlatArray()).sort((a, b) => a - b); + assert.deepStrictEqual(sorted, [0, 1, 2, 3, 4]); + }); + }); + + describe('seed', () => { + it('should return a seed and make rng deterministic', () => { + const s = torch.seed(); + assert.isNumber(s); + const a = torch.rand(3); + // Re-seed with same seed should produce same values + torch.manual_seed(s); + const b = torch.rand(3); + assert.deepStrictEqual(Array.from(a.toFlatArray()), Array.from(b.toFlatArray())); + }); + }); + + describe('allclose', () => { + it('returns true for identical tensors', () => { + const a = new Tensor([1, 2, 3]); + assert.strictEqual(torch.allclose(a, a), true); + }); + + it('returns true for tensors within default tolerance', () => { + const a = new Tensor([1.0, 2.0, 3.0]); + const b = new Tensor([1.0, 2.0, 3.0 + 1e-7]); + assert.strictEqual(torch.allclose(a, b), true); + }); + + it('returns false for tensors outside default tolerance', () => { + const a = new Tensor([1.0, 2.0, 3.0]); + const b = new Tensor([1.0, 2.0, 4.0]); + assert.strictEqual(torch.allclose(a, b), false); + }); + + it('returns false for tensors of different sizes', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([1, 2]); + assert.strictEqual(torch.allclose(a, b), false); + }); + + it('returns false for NaN when equal_nan=false', () => { + const a = new Tensor([1, NaN, 3]); + const b = new Tensor([1, NaN, 3]); + assert.strictEqual(torch.allclose(a, b, 1e-5, 1e-8, false), false); + }); + + it('returns true for NaN when equal_nan=true', () => { + const a = new Tensor([1, NaN, 3]); + const b = new Tensor([1, NaN, 3]); + 
assert.strictEqual(torch.allclose(a, b, 1e-5, 1e-8, true), true); + }); + + it('tensor.allclose() method matches torch.allclose()', () => { + const a = new Tensor([1.0, 2.0, 3.0]); + const b = new Tensor([1.0, 2.0, 3.0 + 1e-7]); + assert.strictEqual(a.allclose(b), torch.allclose(a, b)); + }); + }); + + describe('Softmax', () => { + it('outputs sum to 1 along specified dim', () => { + const x = torch.tensor([1.0, 2.0, 3.0]); + assert.closeTo(torch.softmax(x, 0).sum().item(), 1.0, 1e-6); + }); + + it('tensor.softmax() method works', () => { + const x = torch.tensor([[1.0, 2.0], [3.0, 4.0]]); + x.softmax(1).toArray().forEach(row => assert.closeTo(row.reduce((a, b) => a + b, 0), 1.0, 1e-6)); + }); + + it('negative dim works', () => { + const x = torch.tensor([[1.0, 2.0], [3.0, 4.0]]); + assert.deepStrictEqual(torch.softmax(x, 1).toArray(), torch.softmax(x, -1).toArray()); + }); + + it('gradient flows back through softmax (grad of sum = 0)', () => { + const x = torch.tensor([1.0, 2.0, 3.0], true); + torch.softmax(x, 0).sum().backward(); + x.grad.toFlatArray().forEach(g => assert.closeTo(g, 0.0, 1e-6)); + }); + }); + + describe('Clamp / Clip', () => { + it('clamps values below min and above max', () => { + assert.deepStrictEqual(torch.clamp(torch.tensor([-2.0, 0.0, 2.0, 5.0]), 0, 3).toArray(), [0.0, 0.0, 2.0, 3.0]); + }); + + it('torch.clip is an alias for torch.clamp', () => { + const x = torch.tensor([-1.0, 0.5, 2.0]); + assert.deepStrictEqual(torch.clamp(x, 0, 1).toArray(), torch.clip(x, 0, 1).toArray()); + }); + + it('tensor.clamp() method works', () => { + assert.deepStrictEqual(torch.tensor([-1.0, 0.5, 2.0]).clamp(0, 1).toArray(), [0.0, 0.5, 1.0]); + }); + + it('gradient is 1 inside range, 0 outside', () => { + const x = torch.tensor([-1.0, 0.5, 2.0], true); + torch.clamp(x, 0, 1).sum().backward(); + assert.deepStrictEqual(x.grad.toArray(), [0.0, 1.0, 0.0]); + }); + }); +}); diff --git a/test/generated.test.js b/test/generated.test.js new file mode 100644 index 
00000000..7d880d9d --- /dev/null +++ b/test/generated.test.js @@ -0,0 +1,410 @@ +import * as torch from 'torch'; +import { Tensor } from 'torch'; +import { assert } from 'chai'; +import { testData } from './testcases.gen.js'; + +function assertDeepCloseTo(actual, expected, name = null, delta = 1e-3, first_call = true) { + if (Array.isArray(expected)) { + assert.lengthOf(actual, expected.length, 'Array lengths do not match'); + for (let i = 0; i < expected.length; i++) { + assertDeepCloseTo(actual[i], expected[i], name, delta, false); + } + } else { + if (Number.isNaN(expected)) { + assert.isTrue(Number.isNaN(actual), `${name}: Expected NaN but got ${actual}`); + } else if(!Number.isFinite(expected)) { + assert.equal(actual, expected, `${name}: Expected ${expected} but got ${actual}`); + } else { + assert.closeTo(actual, expected, delta, `${name}: Expected ${expected} but got ${actual}`); + } + } +} + +describe('Automated Tests', () => { + describe('Unary Operations', () => { + for (const [opName, tests] of Object.entries(testData.unary)) { + describe(`.${opName}()`, () => { + tests.forEach((test, idx) => { + it(`case ${idx + 1}`, () => { + const x = new Tensor(test.input, { requires_grad: true }); + const y = x[opName](); + assertDeepCloseTo(y.toArray(), test.expected_output, `${opName} output`); + y.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad, `${opName} grad`); + }); + }); + }); + } + }); + + describe('Binary Operations', () => { + for (const [opName, tests] of Object.entries(testData.binary)) { + describe(`.${opName}()`, () => { + tests.forEach((test, idx) => { + it(`case ${idx + 1}`, () => { + const x = new Tensor(test.input_x, { requires_grad: true }); + const y = new Tensor(test.input_y, { requires_grad: true }); + const out = x[opName](y); + assertDeepCloseTo(out.toArray(), test.expected_output, `${opName} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad_x, `${opName} grad x`); + 
assertDeepCloseTo(y.grad.toArray(), test.expected_grad_y, `${opName} grad y`); + }); + }); + }); + } + }); + + describe('Broadcasting Operations', () => { + testData.broadcasting?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input_x, { requires_grad: true }); + const y = new Tensor(test.input_y, { requires_grad: true }); + + const out = x[test.op_name](y); + + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad_x, `${test.test_name} grad x`); + assertDeepCloseTo(y.grad.toArray(), test.expected_grad_y, `${test.test_name} grad y`); + }); + }); + }); + + describe('Matmul Operations', () => { + testData.matmul?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input_x, { requires_grad: true }); + const y = new Tensor(test.input_y, { requires_grad: true }); + + const out = x.matmul(y); + + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad_x, `${test.test_name} grad x`); + assertDeepCloseTo(y.grad.toArray(), test.expected_grad_y, `${test.test_name} grad y`); + }); + }); + }); + + describe('Reduction Operations', () => { + testData.reductions?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input, { requires_grad: true }); + let out; + if (test.dim === null || test.dim === undefined) { + out = x[test.op_name](); + } else { + out = x[test.op_name](test.dim, test.keepdim); + } + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad, `${test.test_name} grad`); + }); + }); + }); + + describe('Neural Network Modules', () => { + describe('nn.Linear', () => { + testData.linear?.forEach(test => { + it(test.test_name, () => { + const layer = new 
torch.nn.Linear(test.in_features, test.out_features); + + // Overwrite the layer's internal parameters with Python's starting state + layer.weight = new Tensor(test.weight, { requires_grad: true }); + layer.bias = new Tensor(test.bias, { requires_grad: true }); + + const x = new Tensor(test.input, { requires_grad: true }); + + const out = layer.forward(x); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + + out.sum().backward(); + + assertDeepCloseTo(x.grad.toArray(), test.expected_grad_input, `${test.test_name} grad input`); + assertDeepCloseTo(layer.weight.grad.toArray(), test.expected_grad_weight, `${test.test_name} grad weight`); + assertDeepCloseTo(layer.bias.grad.toArray(), test.expected_grad_bias, `${test.test_name} grad bias`); + }); + }); + }); + + describe('Convolutions', () => { + testData.conv?.forEach(test => { + it(test.test_name, () => { + const ConvClass = torch.nn[test.conv_type]; + const layer = new ConvClass( + test.in_channels, + test.out_channels, + test.kernel_size, + test.stride, + test.padding, + test.dilation, + test.groups, + test.has_bias + ); + + layer.weight = new Tensor(test.weight, { requires_grad: true }); + if (test.has_bias) { + layer.bias = new Tensor(test.bias, { requires_grad: true }); + } + + const x = new Tensor(test.input, { requires_grad: true }); + + const out = layer.forward(x); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + + out.sum().backward(); + + assertDeepCloseTo(x.grad.toArray(), test.expected_grad_input, `${test.test_name} grad input`); + assertDeepCloseTo(layer.weight.grad.toArray(), test.expected_grad_weight, `${test.test_name} grad weight`); + if (test.has_bias) { + assertDeepCloseTo(layer.bias.grad.toArray(), test.expected_grad_bias, `${test.test_name} grad bias`); + } + }); + }); + }); + + describe('Loss Functions', () => { + testData.loss?.forEach(test => { + it(test.test_name, () => { + const LossClass = torch.nn[test.loss_type]; 
+ const loss_fn = new LossClass(); + + const input = new Tensor(test.input, { requires_grad: true }); + const target = new Tensor(test.target); + + const output = loss_fn.forward(input, target); + assertDeepCloseTo(output.toArray(), test.expected_output, `${test.test_name} output`); + + output.backward(); + assertDeepCloseTo(input.grad.toArray(), test.expected_grad_input, `${test.test_name} grad input`); + }); + }); + }); + + describe('Activation Functions', () => { + testData.activations?.forEach(test => { + it(test.test_name, () => { + const ActivationClass = torch.nn[test.activation_type]; + const kwargs = test.kwargs || {}; + const activation = test.activation_type === 'LeakyReLU' + ? new ActivationClass(kwargs.negative_slope) + : new ActivationClass(); + + const input = new Tensor(test.input, { requires_grad: true }); + + const output = activation.forward(input); + assertDeepCloseTo(output.toArray(), test.expected_output, `${test.test_name} output`); + + output.sum().backward(); + assertDeepCloseTo(input.grad.toArray(), test.expected_grad_input, `${test.test_name} grad input`); + }); + }); + }); + }); + + describe('Optimizers', () => { + testData.optimizers?.forEach(test => { + it(test.test_name, () => { + const w = new torch.nn.Parameter(test.initial_weight, { requires_grad: true }); + const x = new Tensor(test.input_x); + + const OptimizerClass = torch.optim[test.optimizer]; + let optimizer; + + if (test.optimizer === 'SGD') { + optimizer = new OptimizerClass( + [w], + test.kwargs.lr, + test.kwargs.momentum, + test.kwargs.dampening, + test.kwargs.weight_decay, + test.kwargs.nesterov, + test.kwargs.maximize + ); + } else if (test.optimizer === 'Adam') { + optimizer = new OptimizerClass( + [w], + test.kwargs.lr, + test.kwargs.betas, + test.kwargs.eps, + test.kwargs.weight_decay, + test.kwargs.amsgrad, + test.kwargs.maximize + ); + } else if (test.optimizer === 'Adagrad') { + optimizer = new OptimizerClass( + [w], + test.kwargs.lr, + test.kwargs.lr_decay, + 
test.kwargs.weight_decay, + test.kwargs.eps + ); + } + + optimizer.zero_grad(); + const loss = w.mul(x).sum(); + loss.backward(); + + assertDeepCloseTo(w.grad.toArray(), test.expected_grad, `${test.test_name} grad`); + + optimizer.step(); + + assertDeepCloseTo(w.toArray(), test.expected_updated_weight, `${test.test_name} updated weight`); + }); + }); + }); + + describe('Cat Operations', () => { + testData.cat?.forEach(test => { + it(test.test_name, () => { + const tensors = test.inputs.map(inp => new Tensor(inp, { requires_grad: true })); + const out = torch.cat(tensors, test.dim); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + tensors.forEach((t, i) => { + assertDeepCloseTo(t.grad.toArray(), test.expected_grads[i], `${test.test_name} grad[${i}]`); + }); + }); + }); + }); + + describe('Expand Operations', () => { + testData.expand?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input, { requires_grad: true }); + const out = x.expand(test.expand_shape); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad, `${test.test_name} grad`); + }); + }); + }); + describe('Softmax', () => { + testData.softmax?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input, { requires_grad: true }); + const out = torch.softmax(x, test.dim); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad, `${test.test_name} grad`); + }); + }); + }); + + describe('Clamp', () => { + testData.clamp?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input, { requires_grad: true }); + const out = torch.clamp(x, test.min, test.max); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + 
out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad, `${test.test_name} grad`); + }); + }); + }); + + describe('MaxPool2d', () => { + testData.maxpool?.forEach(test => { + it(test.test_name, () => { + const x = new Tensor(test.input, { requires_grad: true }); + const pool = new torch.nn.MaxPool2d(test.kernel_size, test.stride, test.padding); + const out = pool.forward(x); + assertDeepCloseTo(out.toArray(), test.expected_output, `${test.test_name} output`); + out.sum().backward(); + assertDeepCloseTo(x.grad.toArray(), test.expected_grad, `${test.test_name} grad`); + }); + }); + }); + + describe('Export', () => { + testData.export?.forEach(test => { + it(`export_${test.test_name}`, () => { + let model; + + if (test.model_type === 'LinearReLU') { + const linear = new torch.nn.Linear(test.in_features, test.out_features); + linear.weight = new Tensor(test.weight, { requires_grad: true }); + linear.bias = new Tensor(test.bias, { requires_grad: true }); + const relu = new torch.nn.ReLU(); + model = new torch.nn.Sequential(linear, relu); + } else if (test.model_type === 'TwoLayer') { + const linear1 = new torch.nn.Linear(test.linear1_in, test.linear1_out); + linear1.weight = new Tensor(test.linear1_weight, { requires_grad: true }); + linear1.bias = new Tensor(test.linear1_bias, { requires_grad: true }); + const relu = new torch.nn.ReLU(); + const linear2 = new torch.nn.Linear(test.linear2_in, test.linear2_out); + linear2.weight = new Tensor(test.linear2_weight, { requires_grad: true }); + linear2.bias = new Tensor(test.linear2_bias, { requires_grad: true }); + const sigmoid = new torch.nn.Sigmoid(); + model = new torch.nn.Sequential(linear1, relu, linear2, sigmoid); + } + + const x = new Tensor(test.input); + const ep = torch.export_(model, [x]); + + // Verify graph structure: must have placeholder, call_function, and output nodes + const placeholders = ep.graph.filter(n => n.op === 'placeholder'); + const callFunctions = ep.graph.filter(n => 
n.op === 'call_function'); + const outputs = ep.graph.filter(n => n.op === 'output'); + + assert.isAbove(placeholders.length, 0, 'Should have placeholder nodes'); + assert.isAbove(callFunctions.length, 0, 'Should have call_function nodes'); + assert.equal(outputs.length, 1, 'Should have exactly one output node'); + + // Verify all call_function nodes have aten.* targets + for (const node of callFunctions) { + assert.match(node.target, /^aten\./, `Target should start with aten.: ${node.target}`); + } + + // Verify graph signature matches PyTorch's + // Input specs: parameters should be PARAMETER, user inputs should be USER_INPUT + const expectedParamSpecs = test.expected_input_specs.filter(s => s.kind === 'PARAMETER'); + const expectedUserSpecs = test.expected_input_specs.filter(s => s.kind === 'USER_INPUT'); + + const actualParamSpecs = ep.graph_signature.input_specs.filter(s => s.kind === 'PARAMETER'); + const actualUserSpecs = ep.graph_signature.input_specs.filter(s => s.kind === 'USER_INPUT'); + + assert.equal(actualParamSpecs.length, expectedParamSpecs.length, 'Number of parameter specs should match'); + assert.equal(actualUserSpecs.length, expectedUserSpecs.length, 'Number of user input specs should match'); + + // Verify parameter placeholder naming matches PyTorch convention + for (let i = 0; i < expectedParamSpecs.length; i++) { + assert.equal(actualParamSpecs[i].name, expectedParamSpecs[i].name, + `Parameter spec name should match: expected ${expectedParamSpecs[i].name}`); + assert.equal(actualParamSpecs[i].kind, 'PARAMETER'); + } + + // Verify user input naming + for (let i = 0; i < expectedUserSpecs.length; i++) { + assert.equal(actualUserSpecs[i].name, expectedUserSpecs[i].name, + `User input spec name should match: expected ${expectedUserSpecs[i].name}`); + } + + // Verify output spec + assert.equal(ep.graph_signature.output_specs.length, test.expected_output_specs.length); + assert.equal(ep.graph_signature.output_specs[0].kind, 'USER_OUTPUT'); + + // 
Verify placeholder node shapes match + const expectedPlaceholders = test.expected_nodes.filter(n => n.op === 'placeholder'); + for (let i = 0; i < expectedPlaceholders.length; i++) { + assert.equal(placeholders[i].name, expectedPlaceholders[i].name, + `Placeholder name should match: expected ${expectedPlaceholders[i].name}`); + if (expectedPlaceholders[i].val_shape) { + assert.deepEqual(placeholders[i].val_shape, expectedPlaceholders[i].val_shape, + `Placeholder shape should match for ${expectedPlaceholders[i].name}`); + } + } + + // Verify output node references a valid graph node + const outputArgs = outputs[0].args; + assert.isAbove(outputArgs.length, 0, 'Output should reference at least one node'); + + // All node names should be unique + const allNames = ep.graph.map(n => n.name); + const uniqueNames = new Set(allNames); + assert.equal(allNames.length, uniqueNames.size, 'All node names should be unique'); + }); + }); + }); +}); diff --git a/test/grad_mode.test.js b/test/grad_mode.test.js new file mode 100644 index 00000000..e6ca5e2d --- /dev/null +++ b/test/grad_mode.test.js @@ -0,0 +1,164 @@ +import { assert } from 'chai'; +import { Tensor, no_grad, enable_no_grad, disable_no_grad, is_grad_enabled } from 'torch'; + +describe('Grad Mode', () => { + afterEach(() => { + // Safety: ensure grad mode is always restored after each test + if (!is_grad_enabled()) disable_no_grad(true); + }); + + describe('is_grad_enabled', () => { + it('should be enabled by default', () => { + assert.isTrue(is_grad_enabled()); + }); + }); + + describe('no_grad', () => { + it('should disable gradient tracking inside callback', () => { + const x = new Tensor([2.0], { requires_grad: true }); + const y = no_grad(() => x.mul(x)); + + assert.strictEqual(y.item(), 4.0); + assert.isFalse(y.requires_grad); + assert.isNull(y.grad_fn); + }); + + it('should re-enable gradient tracking after callback', () => { + no_grad(() => {}); + assert.isTrue(is_grad_enabled()); + }); + + it('should re-enable 
gradient tracking even if callback throws', () => { + try { + no_grad(() => { throw new Error('boom'); }); + } catch (e) { + // expected + } + assert.isTrue(is_grad_enabled()); + }); + + it('should return the value from the callback', () => { + const result = no_grad(() => 42); + assert.strictEqual(result, 42); + }); + + it('should nest correctly', () => { + assert.isTrue(is_grad_enabled()); + no_grad(() => { + assert.isFalse(is_grad_enabled()); + no_grad(() => { + assert.isFalse(is_grad_enabled()); + }); + // still disabled after inner no_grad returns + assert.isFalse(is_grad_enabled()); + }); + assert.isTrue(is_grad_enabled()); + }); + + it('should not build computation graph for unary ops', () => { + const x = new Tensor([2.0], { requires_grad: true }); + const y = no_grad(() => x.exp()); + + assert.isFalse(y.requires_grad); + assert.isNull(y.grad_fn); + }); + + it('should not build computation graph for chained ops', () => { + const x = new Tensor([3.0], { requires_grad: true }); + const y = no_grad(() => x.mul(x).add(x)); + + assert.closeTo(y.item(), 12.0, 1e-6); + assert.isFalse(y.requires_grad); + assert.isNull(y.grad_fn); + }); + + it('should not accumulate gradients for operations inside no_grad', () => { + const x = new Tensor([2.0], { requires_grad: true }); + + // Normal forward + backward + const y = x.mul(x); + y.backward(); + assert.strictEqual(x.grad.item(), 4.0); + + // Operations inside no_grad should not affect grad + x.grad = null; + const z = no_grad(() => x.mul(new Tensor(100.0))); + // z has no graph, so backward would be a no-op + assert.isFalse(z.requires_grad); + }); + }); + + describe('enable_no_grad / disable_no_grad', () => { + it('enable_no_grad should disable grad and return previous state', () => { + assert.isTrue(is_grad_enabled()); + const prev = enable_no_grad(); + assert.isTrue(prev); // was enabled + assert.isFalse(is_grad_enabled()); + disable_no_grad(prev); + assert.isTrue(is_grad_enabled()); + }); + + it('should work for 
pyodide-style usage pattern', () => { + // Simulate: python side calls enable_no_grad, does work, calls disable_no_grad + const prev = enable_no_grad(); + + const x = new Tensor([5.0], { requires_grad: true }); + const y = x.mul(x); + assert.isFalse(y.requires_grad); + assert.isNull(y.grad_fn); + + disable_no_grad(prev); + assert.isTrue(is_grad_enabled()); + + // After restoring, grad should work again + const a = new Tensor([3.0], { requires_grad: true }); + const b = a.mul(a); + assert.isTrue(b.requires_grad); + assert.isNotNull(b.grad_fn); + }); + + it('should handle nested enable/disable correctly', () => { + const prev1 = enable_no_grad(); + assert.isFalse(is_grad_enabled()); + + const prev2 = enable_no_grad(); + assert.isFalse(is_grad_enabled()); + + disable_no_grad(prev2); // prev2 is false, still disabled + assert.isFalse(is_grad_enabled()); + + disable_no_grad(prev1); // prev1 is true, now enabled + assert.isTrue(is_grad_enabled()); + }); + }); + + describe('interaction with requires_grad', () => { + it('tensors created with requires_grad=true keep the flag, but ops do not track', () => { + const x = new Tensor([2.0], { requires_grad: true }); + // x.requires_grad is an intrinsic property of the tensor, not affected by grad mode + assert.isTrue(x.requires_grad); + + const y = no_grad(() => { + // x still says requires_grad=true, but the operation should not build a graph + return x.mul(x); + }); + + assert.isFalse(y.requires_grad); + }); + + it('backward still works on tensors created before no_grad', () => { + // Build graph normally + const x = new Tensor([3.0], { requires_grad: true }); + const y = x.mul(x); // y = 9, dy/dx = 6 + + // Enter no_grad for some unrelated work + no_grad(() => { + const _ = x.add(new Tensor(100.0)); // no graph built + }); + + // Original graph is intact + y.backward(); + assert.strictEqual(x.grad.item(), 6.0); + }); + }); +}); diff --git a/test/index.html b/test/index.html new file mode 100644 index 00000000..29e8f241 --- 
/dev/null +++ b/test/index.html @@ -0,0 +1,46 @@ + + + + + + Mocha Tests + + + + + +
+ + + + + + + + + + diff --git a/test/loss.test.js b/test/loss.test.js new file mode 100644 index 00000000..c8a8b99a --- /dev/null +++ b/test/loss.test.js @@ -0,0 +1,77 @@ + +import * as torch from 'torch'; +import { Tensor } from 'torch'; +import { assert } from 'chai'; + +describe('BCELoss', () => { + it('scalar', () => { + const input = new Tensor(0.123, { requires_grad: true }); + const target = new Tensor(0.9876); + + const loss = new torch.nn.BCELoss(); + const result = loss.forward(input, target); + + assert.closeTo(result.item(), 2.0712, 0.001); + }) + + it('should calculate binary cross entropy loss correctly', () => { + /* Python: + import torch + x, y + */ + const m = new torch.nn.Sigmoid() + const input = new Tensor([[0.5424, 1.3919], + [-1.0297, -0.6352], + [0.5700, -1.0037]], + { requires_grad: true }); + const target = new Tensor([[0.8340, 0.4923], + [0.7729, 0.7560], + [0.5616, 0.0999]]); + + const loss = new torch.nn.BCELoss(); + const result = loss.forward(m.forward(input), target); + + assert.closeTo(result.item(), 0.7657, 0.001); + }); +}); + +describe('NLLLoss', () => { + it('basic mean reduction', () => { + const loss_fn = new torch.nn.NLLLoss(); + const logProbs = torch.tensor([ + [Math.log(0.1), Math.log(0.3), Math.log(0.6)], + [Math.log(0.7), Math.log(0.2), Math.log(0.1)], + ]); + const target = torch.tensor([2, 0]); + const out = loss_fn.forward(logProbs, target); + const expected = -(Math.log(0.6) + Math.log(0.7)) / 2; + assert.closeTo(out.item(), expected, 1e-5); + }); + + it('gradient flows back to input', () => { + const loss_fn = new torch.nn.NLLLoss(); + const logProbs = torch.tensor([[-1.0, -2.0, -0.5], [-0.8, -1.2, -0.3]], true); + const target = torch.tensor([0, 2]); + loss_fn.forward(logProbs, target).backward(); + assert.closeTo(logProbs.grad.toArray()[0][0], -0.5, 1e-6); + assert.closeTo(logProbs.grad.toArray()[1][2], -0.5, 1e-6); + assert.closeTo(logProbs.grad.toArray()[0][1], 0.0, 1e-6); + }); + + it('reduction=sum', () => { 
+ const loss_fn = new torch.nn.NLLLoss('sum'); + const logProbs = torch.tensor([[-1.0, -0.5], [-0.8, -0.3]]); + const target = torch.tensor([1, 0]); + assert.closeTo(loss_fn.forward(logProbs, target).item(), 1.3, 1e-5); + }); + + it('reduction=none returns per-sample losses', () => { + const loss_fn = new torch.nn.NLLLoss('none'); + const logProbs = torch.tensor([[-1.0, -0.5], [-0.8, -0.3]]); + const target = torch.tensor([1, 0]); + const out = loss_fn.forward(logProbs, target); + assert.deepStrictEqual(out.shape, [2]); + assert.closeTo(out.toArray()[0], 0.5, 1e-5); + assert.closeTo(out.toArray()[1], 0.8, 1e-5); + }); +}); diff --git a/test/missing_args.test.ts b/test/missing_args.test.ts new file mode 100644 index 00000000..4a4d6f6a --- /dev/null +++ b/test/missing_args.test.ts @@ -0,0 +1,143 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; +import { Tensor } from 'torch'; + +describe('Linear bias parameter', () => { + it('should create Linear with bias=true by default', () => { + const linear = new torch.nn.Linear(4, 3); + assert.strictEqual(linear.parameters().length, 2); + assert.deepStrictEqual(linear.parameters()[0].shape, [3, 4]); // weight + assert.deepStrictEqual(linear.parameters()[1].shape, [3]); // bias + }); + + it('should create Linear with bias=false', () => { + const linear = new torch.nn.Linear(4, 3, false); + assert.strictEqual(linear.parameters().length, 1); + assert.deepStrictEqual(linear.parameters()[0].shape, [3, 4]); // weight only + assert.isNull(linear.bias); + }); + + it('should forward without bias', () => { + const linear = new torch.nn.Linear(3, 2, false); + const input = new Tensor([1, 2, 3]); + const output = linear.forward(input); + assert.deepStrictEqual(output.shape, [2]); + }); + + it('should backward without bias', () => { + const linear = new torch.nn.Linear(3, 2, false); + const input = new Tensor([1, 2, 3]); + const output = linear.forward(input).sum(); + output.backward(); + 
assert.deepStrictEqual(linear.weight.grad.shape, [2, 3]); + assert.isNull(linear.bias); + }); +}); + +describe('Loss reduction parameter', () => { + describe('MSELoss', () => { + const pred = new Tensor([1.0, 2.0, 3.0]); + const target = new Tensor([1.5, 2.5, 3.5]); + + it('reduction=mean (default)', () => { + const loss = new torch.nn.MSELoss(); + const result = loss.forward(pred, target); + // (0.25 + 0.25 + 0.25) / 3 = 0.25 + assert.closeTo(result.item(), 0.25, 1e-5); + }); + + it('reduction=sum', () => { + const loss = new torch.nn.MSELoss('sum'); + const result = loss.forward(pred, target); + // 0.25 + 0.25 + 0.25 = 0.75 + assert.closeTo(result.item(), 0.75, 1e-5); + }); + + it('reduction=none', () => { + const loss = new torch.nn.MSELoss('none'); + const result = loss.forward(pred, target); + assert.deepStrictEqual(result.shape, [3]); + const data = result.toFlatArray(); + assert.closeTo(data[0], 0.25, 1e-5); + assert.closeTo(data[1], 0.25, 1e-5); + assert.closeTo(data[2], 0.25, 1e-5); + }); + }); + + describe('L1Loss', () => { + const pred = new Tensor([1.0, 2.0, 3.0]); + const target = new Tensor([1.5, 2.5, 3.5]); + + it('reduction=mean (default)', () => { + const loss = new torch.nn.L1Loss(); + const result = loss.forward(pred, target); + assert.closeTo(result.item(), 0.5, 1e-5); + }); + + it('reduction=sum', () => { + const loss = new torch.nn.L1Loss('sum'); + const result = loss.forward(pred, target); + assert.closeTo(result.item(), 1.5, 1e-5); + }); + + it('reduction=none', () => { + const loss = new torch.nn.L1Loss('none'); + const result = loss.forward(pred, target); + assert.deepStrictEqual(result.shape, [3]); + const data = result.toFlatArray(); + assert.closeTo(data[0], 0.5, 1e-5); + assert.closeTo(data[1], 0.5, 1e-5); + assert.closeTo(data[2], 0.5, 1e-5); + }); + }); + + describe('CrossEntropyLoss', () => { + // Logits for batch of 3, 3 classes + const input = new Tensor([[2.0, 1.0, 0.1], + [0.5, 2.5, 0.3], + [0.1, 0.2, 3.0]], { requires_grad: 
true }); + const target = new Tensor([0, 1, 2]); + + it('reduction=mean (default)', () => { + const loss = new torch.nn.CrossEntropyLoss(); + const result = loss.forward(input, target); + assert.deepStrictEqual(result.shape, []); + assert.closeTo(result.item(), 0.2489, 0.01); + }); + + it('reduction=sum', () => { + const loss = new torch.nn.CrossEntropyLoss('sum'); + const result = loss.forward(input, target); + assert.deepStrictEqual(result.shape, []); + assert.closeTo(result.item(), 0.7467, 0.01); + }); + + it('reduction=none', () => { + const loss = new torch.nn.CrossEntropyLoss('none'); + const result = loss.forward(input, target); + assert.deepStrictEqual(result.shape, [3]); + }); + + it('reduction=mean backward', () => { + const inp = new Tensor([[2.0, 1.0, 0.1], + [0.5, 2.5, 0.3], + [0.1, 0.2, 3.0]], { requires_grad: true }); + const tgt = new Tensor([0, 1, 2]); + const loss = new torch.nn.CrossEntropyLoss(); + const result = loss.forward(inp, tgt); + result.backward(); + assert.deepStrictEqual(inp.grad.shape, [3, 3]); + }); + + it('reduction=sum backward', () => { + const inp = new Tensor([[2.0, 1.0, 0.1], + [0.5, 2.5, 0.3], + [0.1, 0.2, 3.0]], { requires_grad: true }); + const tgt = new Tensor([0, 1, 2]); + const loss = new torch.nn.CrossEntropyLoss('sum'); + const result = loss.forward(inp, tgt); + result.backward(); + assert.deepStrictEqual(inp.grad.shape, [3, 3]); + }); + }); +}); diff --git a/test/module.test.js b/test/module.test.js new file mode 100644 index 00000000..092c4779 --- /dev/null +++ b/test/module.test.js @@ -0,0 +1,226 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; + +describe('Module', () => { + describe('Linear', () => { + it('should create a linear module', () => { + const linear = new torch.nn.Linear(10, 20); + assert.deepStrictEqual(linear.parameters().length, 2); + assert.deepStrictEqual(linear.parameters()[0].shape, [20, 10]); + assert.deepStrictEqual(linear.parameters()[1].shape, [20]); + }); + + it('should 
forward a tensor', () => { + const linear = new torch.nn.Linear(10, 20); + const input = new torch.Tensor([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + const output = linear.forward(input); + assert.deepStrictEqual(output.shape, [20]); + }); + + it('should forward a tensor (2D)', () => { + // example from https://docs.pytorch.org/docs/stable/generated/torch.nn.Linear.html + const m = new torch.nn.Linear(20, 30); + const input = torch.randn(128, 20); + const output = m.forward(input); + assert.deepStrictEqual(output.shape, [128, 30]); + }); + + it('should backward a linear with correct shape', () => { + const linear = new torch.nn.Linear(10, 20); + const input = new torch.Tensor([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + const output = linear.forward(input).sum(); + output.backward(); + assert.deepStrictEqual(linear.weight.grad.shape, [20, 10]); + assert.deepStrictEqual(linear.bias.grad.shape, [20]); + }); + + it('should backward a linear with correct values', () => { + const linear = new torch.nn.Linear(2, 3); + linear.weight.data = new torch.Tensor([[1, 2], [3, 4], [5, 6]]); + linear.bias.data = new torch.Tensor([1, 2, 3]); + const input = new torch.Tensor([1, 2]); + const output = linear.forward(input).sum(); + output.backward(); + assert.deepStrictEqual(linear.weight.grad.data, [1, 2, 1, 2, 1, 2]); + assert.deepStrictEqual(linear.bias.grad.data, [1, 1, 1]); + }); + }); + + describe('Sequential', () => { + it('should forward the correct shape', () => { + const model = new torch.nn.Sequential( + new torch.nn.Linear(10, 20), + new torch.nn.ReLU(), + new torch.nn.Linear(20, 30) + ); + + const input = new torch.Tensor([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + const output = model.forward(input); + assert.deepStrictEqual(output.shape, [30]); + }); + + it('should append a module', () => { + const model = new torch.nn.Sequential(); + model.append(new torch.nn.Linear(10, 20)); + model.append(new torch.nn.ReLU()); + model.append(new torch.nn.Linear(20, 30)); + + const input = new 
torch.Tensor([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + const output = model.forward(input); + assert.deepStrictEqual(output.shape, [30]); + }); + + it('should extend with modules', () => { + const model = new torch.nn.Sequential(new torch.nn.Linear(10, 20), new torch.nn.ReLU()); + const model2 = new torch.nn.Sequential(new torch.nn.Linear(20, 30), new torch.nn.ReLU()); + model.extend(model2); + + const input = new torch.Tensor([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + const output = model.forward(input); + assert.deepStrictEqual(output.shape, [30]); + }); + + it('should insert a module', () => { + const model = new torch.nn.Sequential( + new torch.nn.Linear(10, 20), + new torch.nn.Linear(30, 40) + ); + model.insert(1, new torch.nn.Linear(20, 30)); + + const input = new torch.Tensor([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + const output = model.forward(input); + assert.deepStrictEqual(output.shape, [40]); + }); + }); + + describe('LeakyReLU', () => { + it('positive values pass through unchanged', () => { + const lr = new torch.nn.LeakyReLU(); + const y = lr.forward(torch.tensor([1.0, 2.0, 3.0])); + assert.deepStrictEqual(y.toArray(), [1.0, 2.0, 3.0]); + }); + + it('negative values scaled by negative_slope (default 0.01)', () => { + const lr = new torch.nn.LeakyReLU(); + const y = lr.forward(torch.tensor([-1.0, -2.0])); + assert.closeTo(y.toArray()[0], -0.01, 1e-6); + assert.closeTo(y.toArray()[1], -0.02, 1e-6); + }); + + it('custom negative_slope is respected', () => { + const lr = new torch.nn.LeakyReLU(0.2); + const y = lr.forward(torch.tensor([-1.0, -2.0])); + assert.closeTo(y.toArray()[0], -0.2, 1e-6); + assert.closeTo(y.toArray()[1], -0.4, 1e-6); + }); + + it('gradient for positive input is 1, negative is negative_slope', () => { + const lr = new torch.nn.LeakyReLU(0.1); + const x = torch.tensor([1.0, -1.0], true); + lr.forward(x).sum().backward(); + assert.closeTo(x.grad.toArray()[0], 1.0, 1e-6); + assert.closeTo(x.grad.toArray()[1], 0.1, 1e-6); + }); + }); + + 
describe('MaxPool2d', () => { + it('basic 2x2 pooling on 1x1x4x4 input', () => { + const pool = new torch.nn.MaxPool2d(2); + const x = torch.tensor([[[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]]]); + const y = pool.forward(x); + assert.deepStrictEqual(y.shape, [1, 1, 2, 2]); + assert.strictEqual(y.toArray()[0][0][0][0], 6); + assert.strictEqual(y.toArray()[0][0][1][1], 16); + }); + + it('output shape is correct with stride', () => { + assert.deepStrictEqual(new torch.nn.MaxPool2d(2, 1).forward(torch.randn(1, 1, 4, 4)).shape, [1, 1, 3, 3]); + }); + + it('gradient accumulates at argmax positions', () => { + const pool = new torch.nn.MaxPool2d(2); + const x = torch.tensor([[[[1.0, 2.0], [3.0, 4.0]]]], true); + pool.forward(x).sum().backward(); + assert.deepStrictEqual(x.grad.toArray(), [[[[0.0, 0.0], [0.0, 1.0]]]]); + }); + + it('accepts 3D input (no batch dim)', () => { + assert.deepStrictEqual(new torch.nn.MaxPool2d(2).forward(torch.randn(1, 4, 4)).shape, [1, 2, 2]); + }); + }); + + describe('Dropout', () => { + it('in eval mode passes input through unchanged', () => { + const drop = new torch.nn.Dropout(0.5); + drop.eval(); + const x = torch.tensor([1.0, 2.0, 3.0, 4.0]); + assert.deepStrictEqual(drop.forward(x).toArray(), x.toArray()); + }); + + it('p=1 always zeros out in training mode', () => { + const drop = new torch.nn.Dropout(1.0); + assert.deepStrictEqual(drop.forward(torch.tensor([1.0, 2.0, 3.0])).toArray(), [0.0, 0.0, 0.0]); + }); + + it('train() / eval() toggle training attribute', () => { + const drop = new torch.nn.Dropout(0.5); + assert.isTrue(drop.training); + drop.eval(); + assert.isFalse(drop.training); + drop.train(); + assert.isTrue(drop.training); + }); + }); + + describe('Softmax', () => { + it('output sums to 1 along specified dim', () => { + const sm = new torch.nn.Softmax(1); + const y = sm.forward(torch.tensor([[1.0, 2.0, 3.0]])); + assert.closeTo(y.toArray()[0].reduce((a, b) => a + b, 0), 1.0, 1e-6); + }); + + it('output 
values are in (0, 1)', () => { + const y = new torch.nn.Softmax(0).forward(torch.randn(5)); + y.toFlatArray().forEach(v => { assert.isAbove(v, 0); assert.isBelow(v, 1); }); + }); + }); + + describe('Flatten', () => { + it('default flattens all dims except batch (start_dim=1)', () => { + const y = new torch.nn.Flatten().forward(torch.tensor([[[[1.0, 2.0], [3.0, 4.0]]]])); + assert.deepStrictEqual(y.shape, [1, 4]); + }); + + it('custom start_dim and end_dim', () => { + assert.deepStrictEqual(new torch.nn.Flatten(0, 1).forward(torch.randn(2, 3, 4)).shape, [6, 4]); + }); + }); + + describe('training mode propagation', () => { + it('training=true by default', () => { + assert.isTrue(new torch.nn.Linear(3, 2).training); + }); + + it('eval() sets training=false', () => { + const linear = new torch.nn.Linear(3, 2); + linear.eval(); + assert.isFalse(linear.training); + }); + + it('train() restores training=true', () => { + const linear = new torch.nn.Linear(3, 2); + linear.eval(); + linear.train(); + assert.isTrue(linear.training); + }); + + it('eval() propagates to submodules in Sequential', () => { + const drop = new torch.nn.Dropout(0.5); + const net = new torch.nn.Sequential(new torch.nn.Linear(2, 2), drop); + net.eval(); + assert.isFalse(drop.training); + net.train(); + assert.isTrue(drop.training); + }); + }); +}); diff --git a/test/nn_functional.test.js b/test/nn_functional.test.js new file mode 100644 index 00000000..e21d3c84 --- /dev/null +++ b/test/nn_functional.test.js @@ -0,0 +1,56 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; + +describe('NN Functional', () => { + describe('Relu', () => { + it('should forward a tensor', () => { + const x = new torch.Tensor([1, -2, 3, -4, 5]); + const result = torch.nn.functional.relu(x); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 0, 3, 0, 5]); + assert.deepStrictEqual(result.shape, [5]); + }); + }); + + describe('leaky_relu', () => { + it('passes positive values unchanged', () => { + 
const y = torch.nn.functional.leaky_relu(torch.tensor([1.0, 2.0])); + assert.deepStrictEqual(y.toArray(), [1.0, 2.0]); + }); + + it('scales negative values by negative_slope', () => { + const y = torch.nn.functional.leaky_relu(torch.tensor([-2.0]), 0.1); + assert.closeTo(y.toArray()[0], -0.2, 1e-6); + }); + }); + + describe('max_pool2d', () => { + it('basic 2x2 pooling', () => { + const x = torch.tensor([[[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]]]); + const y = torch.nn.functional.max_pool2d(x, 2); + assert.deepStrictEqual(y.shape, [1, 1, 2, 2]); + assert.strictEqual(y.toArray()[0][0][0][0], 6); + assert.strictEqual(y.toArray()[0][0][1][1], 16); + }); + + it('output shape is correct with explicit stride', () => { + assert.deepStrictEqual(torch.nn.functional.max_pool2d(torch.randn(1, 1, 4, 4), 2, 1).shape, [1, 1, 3, 3]); + }); + }); + + describe('nll_loss', () => { + it('mean reduction matches expected value', () => { + const logProbs = torch.tensor([ + [Math.log(0.1), Math.log(0.3), Math.log(0.6)], + [Math.log(0.7), Math.log(0.2), Math.log(0.1)], + ]); + const out = torch.nn.functional.nll_loss(logProbs, torch.tensor([2, 0])); + assert.closeTo(out.item(), -(Math.log(0.6) + Math.log(0.7)) / 2, 1e-5); + }); + + it('sum reduction', () => { + const logProbs = torch.tensor([[-1.0, -0.5], [-0.8, -0.3]]); + const out = torch.nn.functional.nll_loss(logProbs, torch.tensor([1, 0]), 'sum'); + assert.closeTo(out.item(), 1.3, 1e-5); + }); + }); +}); diff --git a/test/optimizers.test.js b/test/optimizers.test.js new file mode 100644 index 00000000..c192e69e --- /dev/null +++ b/test/optimizers.test.js @@ -0,0 +1,123 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; + +const EPS = 1e-6; + +describe('Optimizers', () => { + describe('SGD', () => { + it('should update parameters', () => { + const x = new torch.Tensor([1.0], { requires_grad: true }); + const sgd = new torch.optim.SGD([x], 0.01); + + const y = x.mul(new torch.Tensor(2.0)); + 
y.backward(); + + /** + * x = 1, grad = 2 + * x_new = x - lr * grad = 1 - 0.01 * 2 = 0.98 + */ + sgd.step(); + const actual = sgd.params[0].data[0]; + const expected = 0.98; + assert.closeTo(actual, expected, EPS); + }); + + it('should update parameters correctly over multiple steps (quadratic)', () => { + const x = new torch.Tensor([0.5], { requires_grad: true }); + const sgd = new torch.optim.SGD([x], 0.1); + + let y = x.pow(2); + + /** + * x = 0.5, grad = 2x = 1 + * x_new = x - lr * grad = 0.5 - 0.1 * 1 = 0.4 + */ + y.backward(); + sgd.step(); + sgd.zero_grad(); + + let actual = x.data[0]; + assert.closeTo(actual, 0.4, EPS); + + /** + * x = 0.4, grad = 2x = 0.8 + * x_new = x - lr * grad = 0.4 - 0.1 * 0.8 = 0.32 + */ + y = x.pow(2); + y.backward(); + sgd.step(); + + actual = x.data[0]; + assert.closeTo(actual, 0.32, EPS); + }); + }); + + describe('Adam', () => { + it('should handle bias correction with constant gradient', () => { + const x = new torch.Tensor([10.0], { requires_grad: true }); + const adam = new torch.optim.Adam([x], 0.1); + + let y = x.mul(2); + y.backward(); + adam.step(); + adam.zero_grad(); + + assert.closeTo(x.data[0], 9.9, EPS); + + y = x.mul(2); + y.backward(); + adam.step(); + + assert.closeTo(x.data[0], 9.8, EPS); + }); + }); + + describe('Adagrad', () => { + it('parameters are updated', () => { + const w = new torch.nn.Parameter(torch.tensor([1.0, 2.0, 3.0])); + const opt = new torch.optim.Adagrad([w], 0.1); + opt.zero_grad(); + w.mul(torch.tensor([1.0, 1.0, 1.0])).sum().backward(); + const before = [...w.toFlatArray()]; + opt.step(); + w.toFlatArray().forEach((v, i) => assert.notEqual(v, before[i])); + }); + + it('step size decreases over time (accumulating squared gradients)', () => { + const w = new torch.nn.Parameter(torch.tensor([2.0])); + const opt = new torch.optim.Adagrad([w], 0.1); + const steps = []; + for (let i = 0; i < 3; i++) { + opt.zero_grad(); + w.mul(torch.ones(1)).sum().backward(); + const before = w.item(); + 
opt.step(); + steps.push(Math.abs(w.item() - before)); + } + assert.isBelow(steps[1], steps[0]); + assert.isBelow(steps[2], steps[1]); + }); + + it('weight_decay adds L2 regularization', () => { + const w1 = new torch.nn.Parameter(torch.tensor([1.0, 2.0])); + const w2 = new torch.nn.Parameter(torch.tensor([1.0, 2.0])); + const opt1 = new torch.optim.Adagrad([w1], 0.1, 0.0, 0.0); + const opt2 = new torch.optim.Adagrad([w2], 0.1, 0.0, 0.01); + for (const [w, opt] of [[w1, opt1], [w2, opt2]]) { + opt.zero_grad(); + w.mul(torch.ones(2)).sum().backward(); + opt.step(); + } + assert.notDeepEqual(w1.toArray(), w2.toArray()); + }); + + it('zero_grad clears gradients', () => { + const w = new torch.nn.Parameter(torch.tensor([1.0, 2.0])); + const opt = new torch.optim.Adagrad([w], 0.1); + w.mul(torch.ones(2)).sum().backward(); + assert.isNotNull(w.grad); + opt.zero_grad(); + assert.isNull(w.grad); + }); + }); +}); diff --git a/test/parameter.test.js b/test/parameter.test.js new file mode 100644 index 00000000..a1776309 --- /dev/null +++ b/test/parameter.test.js @@ -0,0 +1,74 @@ +import { assert } from 'chai'; +import { Tensor, no_grad, enable_no_grad, disable_no_grad, is_grad_enabled, nn } from 'torch'; + +const { Parameter } = nn; + +describe('Parameter', () => { + afterEach(() => { + // Safety: ensure grad mode is always restored after each test + if (!is_grad_enabled()) disable_no_grad(true); + }); + + describe('requires_grad default', () => { + it('should have requires_grad=true by default (from array)', () => { + const p = new Parameter([1.0, 2.0, 3.0]); + assert.isTrue(p.requires_grad); + }); + + it('should have requires_grad=true by default (from Tensor)', () => { + const t = new Tensor([1.0, 2.0, 3.0]); + const p = new Parameter(t); + assert.isTrue(p.requires_grad); + }); + + it('should have a grad_fn (AccumulateGrad) when requires_grad=true', () => { + const p = new Parameter([1.0, 2.0]); + assert.isNotNull(p.grad_fn); + }); + + it('should allow explicit 
requires_grad=false', () => { + const p = new Parameter([1.0, 2.0], { requires_grad: false }); + assert.isFalse(p.requires_grad); + }); + }); + + describe('no_grad does NOT affect Parameter creation', () => { + it('Parameter created inside no_grad still has requires_grad=true (from array)', () => { + const p = no_grad(() => new Parameter([1.0, 2.0, 3.0])); + assert.isTrue(p.requires_grad); + }); + + it('Parameter created inside no_grad still has requires_grad=true (from Tensor)', () => { + const t = new Tensor([1.0, 2.0]); + const p = no_grad(() => new Parameter(t)); + assert.isTrue(p.requires_grad); + }); + + it('Parameter created inside no_grad still has a grad_fn', () => { + const p = no_grad(() => new Parameter([1.0, 2.0])); + assert.isNotNull(p.grad_fn); + }); + + it('Parameter created with enable_no_grad/disable_no_grad still has requires_grad=true', () => { + const prev = enable_no_grad(); + const p = new Parameter([1.0, 2.0, 3.0]); + disable_no_grad(prev); + assert.isTrue(p.requires_grad); + }); + + it('grad mode is restored after no_grad block that creates a Parameter', () => { + no_grad(() => new Parameter([1.0])); + assert.isTrue(is_grad_enabled()); + }); + + it('operations on Parameter inside no_grad do not build a graph, but Parameter retains requires_grad', () => { + const p = new Parameter([2.0]); + const result = no_grad(() => p.mul(p)); + // Parameter itself is unaffected by no_grad + assert.isTrue(p.requires_grad); + // But the operation result has no grad + assert.isFalse(result.requires_grad); + assert.isNull(result.grad_fn); + }); + }); +}); diff --git a/test/template.test.js b/test/template.test.js new file mode 100644 index 00000000..22d1158f --- /dev/null +++ b/test/template.test.js @@ -0,0 +1,12 @@ +import { assert } from 'chai'; +import { Tensor } from 'torch'; + +describe('Tensor', () => { + describe('Constructor', () => { + it('should create a tensor with correct data and shape', () => { + const tensor = new Tensor([10, 20, 30]); + 
assert.deepStrictEqual(tensor.data, [10, 20, 30]); + assert.deepStrictEqual(tensor.shape, [3]); + }); + }); +}); diff --git a/test/tensor.test.js b/test/tensor.test.js new file mode 100644 index 00000000..e603e07f --- /dev/null +++ b/test/tensor.test.js @@ -0,0 +1,522 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; +import { Tensor } from 'torch'; + +describe('Tensor', () => { + describe('Constructor', () => { + it('should create a tensor with correct data and shape', () => { + const tensor = new Tensor([10, 20, 30]); + assert.deepStrictEqual(tensor.data, [10, 20, 30]); + assert.deepStrictEqual(tensor.shape, [3]); + }); + + it('should create a tensor with nested array', () => { + const tensor = new Tensor([ + [1, 2], + [3, 4] + ]); + assert.deepStrictEqual(tensor.data, [1, 2, 3, 4]); + assert.deepStrictEqual(tensor.shape, [2, 2]); + }); + }); + + describe('Shape', () => { + it('should return the shape of the 1D tensor', () => { + const tensor = new Tensor([10, 20, 30]); + assert.deepStrictEqual(tensor.shape, [3]); + }); + + it('should return the shape of the 2D tensor', () => { + const tensor = new Tensor([ + [1, 2, 5], + [3, 4, 6] + ]); + assert.deepStrictEqual(tensor.shape, [2, 3]); + }); + + describe('Reshape', () => { + it('should reshape a tensor', () => { + const t = new Tensor([1, 2, 3, 4]); + const result = t.reshape([2, 2]); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4]); + assert.deepStrictEqual(result.shape, [2, 2]); + }); + + it('should not reshape a tensor if the shape is not compatible', () => { + const t = new Tensor([1, 2, 3, 4]); + assert.throws(() => t.reshape([2, 3]), Error); + }); + + it('should reshape a tensor with different dimensions', () => { + const t = new Tensor([1, 2, 3, 4, 5, 6]); + const result = t.reshape([2, 3]); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [2, 3]); + + const result2 = t.reshape([3, 2]); + 
assert.deepStrictEqual(Array.from(result2.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result2.shape, [3, 2]); + + const result3 = t.reshape([6]); + assert.deepStrictEqual(Array.from(result3.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result3.shape, [6]); + + const result4 = t.reshape([1, 2, 3]); + assert.deepStrictEqual(Array.from(result4.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result4.shape, [1, 2, 3]); + }); + }); + + describe('Unsqueeze', () => { + it('should unsqueeze a tensor', () => { + const t = new Tensor([1, 2, 3]); + let result = t.unsqueeze(0); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3]); + assert.deepStrictEqual(result.shape, [1, 3]); + + let s = new Tensor([1, 2, 3, 4, 5, 6]); + s = s.reshape([2, 3]); + + result = s.unsqueeze(0); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [1, 2, 3]); + + result = s.unsqueeze(1); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [2, 1, 3]); + + result = s.unsqueeze(2); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [2, 3, 1]); + }); + + it('should unsqueeze a tensor with negative dimension', () => { + const t = new Tensor([1, 2, 3]); + let result = t.unsqueeze(-1); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3]); + assert.deepStrictEqual(result.shape, [3, 1]); + + let s = new Tensor([1, 2, 3, 4, 5, 6]); + s = s.reshape([2, 3]); + + result = s.unsqueeze(-3); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [1, 2, 3]); + + result = s.unsqueeze(-2); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [2, 1, 3]); + + result = s.unsqueeze(-1); + 
assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(result.shape, [2, 3, 1]); + }); + }); + }); +}); + +describe('Operations', () => { + describe('Binary Pointwise Operations', () => { + describe('Addition', () => { + it('should add two tensors with same shape', () => { + const t1 = new Tensor([10]); + const t2 = new Tensor([20]); + const result = t1.add(t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [30]); + assert.deepStrictEqual(result.shape, [1]); + }); + + it('should add two 1D tensors of same length', () => { + const t1 = new Tensor([1, 2, 3]); + const t2 = new Tensor([4, 5, 6]); + const result = t1.add(t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [5, 7, 9]); + assert.deepStrictEqual(result.shape, [3]); + }); + }); + + describe('Multiplication', () => { + it('should multiply two tensors with same shape, scalar', () => { + const t1 = new Tensor([10]); + const t2 = new Tensor([20]); + const result = t1.mul(t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [200]); + assert.deepStrictEqual(result.shape, [1]); + }); + + it('should multiply two tensors with same shape, scalar 2', () => { + const t1 = new Tensor([-1.0604]); + const t2 = new Tensor([0.756]); + const result = t1.mul(t2); + + assert.closeTo(Array.from(result.toFlatArray())[0], -0.8016, 0.001); + assert.deepStrictEqual(result.shape, [1]); + }); + + it('should multiply two tensors with the same shape, integers', () => { + const i1 = [ + [2, 3], + [5, 7], + [11, 13] + ]; + const t1 = new Tensor(i1); + + const i2 = [ + [1, 2], + [3, 4], + [5, 6] + ]; + const t2 = new Tensor(i2); + + const result = t1.mul(t2); + const expected = [ + [2, 6], + [15, 28], + [55, 78] + ]; + + for (let i = 0; i < expected.length; i++) { + for (let j = 0; j < expected[i].length; j++) { + assert.closeTo(result.data[i * expected[i].length + j], expected[i][j], 0.001); + } + } + }); + + it('should multiply two tensors with the 
same shape', () => { + const i1 = [ + [-0.4583, -0.222], + [-1.3351, -1.0604], + [-0.4482, -1.316] + ]; + const t1 = new Tensor(i1); + + const i2 = [ + [0.834, 0.4923], + [0.7729, 0.756], + [0.5616, 0.0999] + ]; + const t2 = new Tensor(i2); + + const result = t1.mul(t2); + const expected = [ + [-0.3822, -0.1093], + [-1.0319, -0.8016], + [-0.2517, -0.1315] + ]; + + for (let i = 0; i < expected.length; i++) { + for (let j = 0; j < expected[i].length; j++) { + assert.closeTo(result.data[i * expected[i].length + j], expected[i][j], 0.001); + } + } + + assert.deepStrictEqual(result.shape, [3, 2]); + }); + }); + }); + + describe('Operations with Broadcasting', () => { + it('should multiply two tensors with different shapes (broadcasting)', () => { + const t1 = new Tensor([10, 20, 30]); + const t2 = new Tensor([1]); + const result = t1.mul(t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [10, 20, 30]); + assert.deepStrictEqual(result.shape, [3]); + }); + it('should add tensors with different shapes (broadcasting)', () => { + const t1 = new Tensor([10, 20, 30]); + const t2 = new Tensor([1]); + const result = t1.add(t2); + + assert.deepStrictEqual(Array.from(result.toFlatArray()), [11, 21, 31]); + assert.deepStrictEqual(result.shape, [3]); + }); + }); + + describe('Unary Operations', () => { + describe('Neg', () => { + it('should negate a tensor', () => { + const t = new Tensor([1, 2, 3, -4]); + const result = t.neg(); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [-1, -2, -3, 4]); + assert.deepStrictEqual(result.shape, [4]); + }); + }); + + it('should exponentiate a tensor', () => { + const input = [1, 2, 3, -4, 2.5, -6.7]; + const t = new Tensor(input); + const result = t.exp(); + const expected = input.map(x => Math.exp(x)); + for (let i = 0; i < expected.length; i++) { + assert.closeTo(result.data[i], expected[i], 0.0001); + } + assert.deepStrictEqual(result.shape, [6]); + }); + }); + + describe('Comparison Operations', () => { + it('le should return 1 where
a <= b', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([2, 2, 1]); + const result = a.le(b); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, 1, 0]); + }); + + it('ge should return 1 where a >= b', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([2, 2, 1]); + const result = a.ge(b); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [0, 1, 1]); + }); + + it('ne should return 1 where a != b', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([1, 0, 3]); + const result = a.ne(b); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [0, 1, 0]); + }); + }); + + describe('Fmod', () => { + it('should compute element-wise remainder', () => { + const a = new Tensor([7, -7, 5, 10]); + const b = new Tensor([3, 3, 2, 4]); + const result = a.fmod(b); + assert.deepStrictEqual(Array.from(result.toFlatArray()), [1, -1, 1, 2]); + }); + }); + + describe('Numel', () => { + it('should return the total number of elements', () => { + const t = new Tensor([[1, 2, 3], [4, 5, 6]]); + assert.strictEqual(t.numel(), 6); + }); + }); + + describe('cat', () => { + describe('1D tensors', () => { + it('concatenates two 1D tensors along dim 0', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([4, 5]); + const out = torch.cat([a, b]); + assert.deepStrictEqual(Array.from(out.toFlatArray()), [1, 2, 3, 4, 5]); + assert.deepStrictEqual(out.shape, [5]); + }); + + it('concatenates three 1D tensors', () => { + const a = new Tensor([1]); + const b = new Tensor([2, 3]); + const c = new Tensor([4, 5, 6]); + const out = torch.cat([a, b, c]); + assert.deepStrictEqual(Array.from(out.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(out.shape, [6]); + }); + }); + + describe('2D tensors', () => { + it('concatenates along dim 0 (row-wise)', () => { + const a = new Tensor([[1, 2], [3, 4]]); + const b = new Tensor([[5, 6]]); + const out = torch.cat([a, b], 0); + 
assert.deepStrictEqual(Array.from(out.toFlatArray()), [1, 2, 3, 4, 5, 6]); + assert.deepStrictEqual(out.shape, [3, 2]); + }); + + it('concatenates along dim 1 (column-wise)', () => { + const a = new Tensor([[1, 2], [3, 4]]); + const b = new Tensor([[5], [6]]); + const out = torch.cat([a, b], 1); + assert.deepStrictEqual(Array.from(out.toFlatArray()), [1, 2, 5, 3, 4, 6]); + assert.deepStrictEqual(out.shape, [2, 3]); + }); + }); + + describe('3D tensors', () => { + it('concatenates along dim 0', () => { + const out = torch.cat([torch.ones([2, 3, 4]), torch.zeros([1, 3, 4])], 0); + assert.deepStrictEqual(out.shape, [3, 3, 4]); + }); + + it('concatenates along dim 2', () => { + const out = torch.cat([torch.ones([2, 3, 4]), torch.ones([2, 3, 2])], 2); + assert.deepStrictEqual(out.shape, [2, 3, 6]); + }); + }); + + describe('negative dim', () => { + it('dim=-1 is the last dimension', () => { + const a = new Tensor([[1, 2], [3, 4]]); + const b = new Tensor([[5], [6]]); + const pos = torch.cat([a, b], 1); + const neg = torch.cat([a, b], -1); + assert.deepStrictEqual(Array.from(neg.toFlatArray()), Array.from(pos.toFlatArray())); + assert.deepStrictEqual(neg.shape, [2, 3]); + }); + + it('dim=-2 on a 3D tensor', () => { + const a = torch.ones([2, 3, 4]); + const b = torch.ones([2, 1, 4]); + const pos = torch.cat([a, b], 1); + const neg = torch.cat([a, b], -2); + assert.deepStrictEqual(neg.shape, pos.shape); + assert.deepStrictEqual(Array.from(neg.toFlatArray()), Array.from(pos.toFlatArray())); + }); + }); + + describe('autograd', () => { + it('grad flows back to each input (1D)', () => { + const x = new Tensor([1, 2, 3], { requires_grad: true }); + const y = new Tensor([4, 5], { requires_grad: true }); + torch.cat([x, y]).sum().backward(); + assert.deepStrictEqual(Array.from(x.grad.toFlatArray()), [1, 1, 1]); + assert.deepStrictEqual(Array.from(y.grad.toFlatArray()), [1, 1]); + }); + + it('grad flows back to each input (2D, dim 0)', () => { + const x = new Tensor([[1, 2], [3, 
4]], { requires_grad: true }); + const y = new Tensor([[5, 6]], { requires_grad: true }); + torch.cat([x, y], 0).sum().backward(); + assert.deepStrictEqual(Array.from(x.grad.toFlatArray()), [1, 1, 1, 1]); + assert.deepStrictEqual(Array.from(y.grad.toFlatArray()), [1, 1]); + }); + + it('grad flows back to each input (2D, dim 1)', () => { + const x = new Tensor([[1, 2], [3, 4]], { requires_grad: true }); + const y = new Tensor([[5], [6]], { requires_grad: true }); + torch.cat([x, y], 1).sum().backward(); + assert.deepStrictEqual(Array.from(x.grad.toFlatArray()), [1, 1, 1, 1]); + assert.deepStrictEqual(Array.from(y.grad.toFlatArray()), [1, 1]); + }); + + it('non-uniform upstream gradient is sliced correctly', () => { + const x = new Tensor([[1, 0], [0, 1]], { requires_grad: true }); + const y = new Tensor([[2, 2]], { requires_grad: true }); + const upstream = new Tensor([[1, 2], [3, 4], [5, 6]]); + torch.cat([x, y], 0).mul(upstream).sum().backward(); + assert.deepStrictEqual(Array.from(x.grad.toFlatArray()), [1, 2, 3, 4]); + assert.deepStrictEqual(Array.from(y.grad.toFlatArray()), [5, 6]); + }); + + it('grad only flows to tensors with requires_grad=true', () => { + const x = new Tensor([1, 2, 3], { requires_grad: true }); + const y = new Tensor([4, 5]); + torch.cat([x, y]).sum().backward(); + assert.deepStrictEqual(Array.from(x.grad.toFlatArray()), [1, 1, 1]); + assert.isNull(y.grad); + }); + + it('grad flows through three inputs', () => { + const a = new Tensor([1, 2], { requires_grad: true }); + const b = new Tensor([3], { requires_grad: true }); + const c = new Tensor([4, 5, 6], { requires_grad: true }); + torch.cat([a, b, c]).sum().backward(); + assert.deepStrictEqual(Array.from(a.grad.toFlatArray()), [1, 1]); + assert.deepStrictEqual(Array.from(b.grad.toFlatArray()), [1]); + assert.deepStrictEqual(Array.from(c.grad.toFlatArray()), [1, 1, 1]); + }); + + it('cat result can be used in further computation', () => { + const x = new Tensor([2, 3], { requires_grad: true 
}); + const y = new Tensor([4], { requires_grad: true }); + torch.cat([x, y]).mul(new Tensor([1, 2, 3])).sum().backward(); + assert.deepStrictEqual(Array.from(x.grad.toFlatArray()), [1, 2]); + assert.deepStrictEqual(Array.from(y.grad.toFlatArray()), [3]); + }); + }); + + describe('tensor.cat method', () => { + it('tensor.cat(other) prepends self', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([4, 5]); + assert.deepStrictEqual(Array.from(a.cat(b).toFlatArray()), [1, 2, 3, 4, 5]); + }); + + it('tensor.cat([b, c]) prepends self before b and c', () => { + const a = new Tensor([1]); + const b = new Tensor([2, 3]); + const c = new Tensor([4]); + assert.deepStrictEqual(Array.from(a.cat([b, c]).toFlatArray()), [1, 2, 3, 4]); + }); + + it('tensor.cat with dim argument', () => { + const a = new Tensor([[1, 2], [3, 4]]); + const b = new Tensor([[5, 6]]); + assert.deepStrictEqual(a.cat(b, 0).shape, [3, 2]); + }); + + it('tensor.cat gradient flows back through self', () => { + const a = new Tensor([1, 2], { requires_grad: true }); + const b = new Tensor([3, 4], { requires_grad: true }); + a.cat(b).sum().backward(); + assert.deepStrictEqual(Array.from(a.grad.toFlatArray()), [1, 1]); + assert.deepStrictEqual(Array.from(b.grad.toFlatArray()), [1, 1]); + }); + }); + + describe('aliases (concatenate, concat)', () => { + it('torch.concatenate produces the same result as torch.cat', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([4, 5]); + assert.deepStrictEqual( + Array.from(torch.concatenate([a, b]).toFlatArray()), + Array.from(torch.cat([a, b]).toFlatArray()) + ); + }); + + it('torch.concat produces the same result as torch.cat', () => { + const a = new Tensor([1, 2, 3]); + const b = new Tensor([4, 5]); + assert.deepStrictEqual( + Array.from(torch.concat([a, b]).toFlatArray()), + Array.from(torch.cat([a, b]).toFlatArray()) + ); + }); + + it('tensor.concatenate is an alias for tensor.cat', () => { + const a = new Tensor([1, 2]); + const b = new 
Tensor([3]); + assert.deepStrictEqual( + Array.from(a.concatenate(b).toFlatArray()), + Array.from(a.cat(b).toFlatArray()) + ); + }); + + it('tensor.concat is an alias for tensor.cat', () => { + const a = new Tensor([1, 2]); + const b = new Tensor([3]); + assert.deepStrictEqual( + Array.from(a.concat(b).toFlatArray()), + Array.from(a.cat(b).toFlatArray()) + ); + }); + }); + + describe('error handling', () => { + it('throws on empty tensor list', () => { + assert.throws(() => torch.cat([]), /non-empty/); + }); + + it('throws on zero-dimensional tensor', () => { + const a = new Tensor(1); + assert.throws(() => torch.cat([a, a]), /zero-dimensional/); + }); + + it('throws on mismatched ndim', () => { + const a = new Tensor([1, 2]); + const b = new Tensor([[1, 2]]); + assert.throws(() => torch.cat([a, b]), /dimensions/); + }); + + it('throws on mismatched non-cat dimension', () => { + const a = new Tensor([[1, 2, 3]]); + const b = new Tensor([[1, 2]]); + assert.throws(() => torch.cat([a, b], 0), /shape/); + }); + }); + }); +}); diff --git a/test/testcases.gen.js b/test/testcases.gen.js new file mode 100644 index 00000000..ba3deaaa --- /dev/null +++ b/test/testcases.gen.js @@ -0,0 +1,8351 @@ +export const testData = { + "unary": { + "log": [ + { + "input": [ + [0.0, 1.0, -1.0], + [0.23033303022384644, -1.1228563785552979, -0.18632829189300537], + [2.2082014083862305, -0.637997031211853, 0.46165722608566284] + ], + "expected_output": [ + [-Infinity, 0.0, NaN], + [-1.468229055404663, NaN, NaN], + [0.7921783328056335, NaN, -0.7729325890541077] + ], + "expected_grad": [ + [Infinity, 1.0, -1.0], + [4.3415398597717285, -0.8905858397483826, -5.366871356964111], + [0.45285725593566895, -1.5674054622650146, 2.166109323501587] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [1.110290288925171, -1.6897989511489868, -0.9889599084854126], + [0.9579718112945557, 1.322135090827942, 0.8171897530555725] + ], + "expected_output": [ + [-Infinity, 0.0, NaN], + [0.10462149977684021, NaN, 
NaN], + [-0.04293692484498024, 0.279247909784317, -0.2018839567899704] + ], + "expected_grad": [ + [Infinity, 1.0, -1.0], + [0.9006653428077698, -0.5917863845825195, -1.011163353919983], + [1.0438719987869263, 0.7563523650169373, 1.2237060070037842] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.6863219141960144, -0.32775864005088806, 0.7949687242507935], + [0.2815195620059967, 0.056163541972637177, 0.5227160453796387] + ], + "expected_output": [ + [-Infinity, 0.0, NaN], + [-0.37640848755836487, NaN, -0.22945250570774078], + [-1.2675533294677734, -2.8794875144958496, -0.6487168669700623] + ], + "expected_grad": [ + [Infinity, 1.0, -1.0], + [1.4570422172546387, -3.051025629043579, 1.2579110860824585], + [3.5521509647369385, 17.805145263671875, 1.913084626197815] + ] + } + ], + "sqrt": [ + { + "input": [ + [0.0, 1.0, -1.0], + [-0.008498823270201683, 0.7290605902671814, 0.13314196467399597], + [0.8639776706695557, -1.0156747102737427, -0.8887485265731812] + ], + "expected_output": [ + [0.0, 1.0, NaN], + [NaN, 0.8538504242897034, 0.36488622426986694], + [0.9295039772987366, NaN, NaN] + ], + "expected_grad": [ + [Infinity, 0.5, NaN], + [NaN, 0.5855826735496521, 1.3702901601791382], + [0.5379213094711304, NaN, NaN] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.9912377595901489, 0.4679030478000641, -0.20493191480636597], + [-0.7409071326255798, 0.3618396520614624, 1.9198992252349854] + ], + "expected_output": [ + [0.0, 1.0, NaN], + [0.9956092238426208, 0.6840344071388245, NaN], + [NaN, 0.6015310883522034, 1.3856042623519897] + ], + "expected_grad": [ + [Infinity, 0.5, NaN], + [0.5022050738334656, 0.7309573888778687, NaN], + [NaN, 0.8312122225761414, 0.3608534038066864] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-0.6890136003494263, -1.1267184019088745, -0.2857550382614136], + [-1.0935109853744507, 1.1351022720336914, 0.7592452168464661] + ], + "expected_output": [ + [0.0, 1.0, NaN], + [NaN, NaN, NaN], + [NaN, 1.0654118061065674, 0.8713467717170715] + ], + 
"expected_grad": [ + [Infinity, 0.5, NaN], + [NaN, NaN, NaN], + [NaN, 0.46930208802223206, 0.5738243460655212] + ] + } + ], + "exp": [ + { + "input": [ + [0.0, 1.0, -1.0], + [0.960340142250061, -0.5671805739402771, -0.5706474184989929], + [1.5980384349822998, 0.11148621141910553, -0.03919669985771179] + ], + "expected_output": [ + [1.0, 2.7182817459106445, 0.3678794503211975], + [2.6125850677490234, 0.5671221613883972, 0.5651594400405884], + [4.943326473236084, 1.1179382801055908, 0.9615615606307983] + ], + "expected_grad": [ + [1.0, 2.7182817459106445, 0.3678794503211975], + [2.6125850677490234, 0.5671221613883972, 0.5651594400405884], + [4.943326473236084, 1.1179382801055908, 0.9615615606307983] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-1.6270242929458618, -1.3951387405395508, -0.23872417211532593], + [-0.5049903988838196, -2.475163221359253, -0.931602954864502] + ], + "expected_output": [ + [1.0, 2.7182817459106445, 0.3678794503211975], + [0.19651347398757935, 0.24779865145683289, 0.7876321077346802], + [0.603511393070221, 0.08414925634860992, 0.39392176270484924] + ], + "expected_grad": [ + [1.0, 2.7182817459106445, 0.3678794503211975], + [0.19651347398757935, 0.24779865145683289, 0.7876321077346802], + [0.603511393070221, 0.08414925634860992, 0.39392176270484924] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-0.09089037775993347, -1.3296922445297241, -0.542582631111145], + [0.5470984578132629, 0.6430637240409851, -0.7904810905456543] + ], + "expected_output": [ + [1.0, 2.7182817459106445, 0.3678794503211975], + [0.9131178259849548, 0.26455867290496826, 0.5812451839447021], + [1.7282311916351318, 1.9023001194000244, 0.45362651348114014] + ], + "expected_grad": [ + [1.0, 2.7182817459106445, 0.3678794503211975], + [0.9131178259849548, 0.26455867290496826, 0.5812451839447021], + [1.7282311916351318, 1.9023001194000244, 0.45362651348114014] + ] + } + ], + "square": [ + { + "input": [ + [0.0, 1.0, -1.0], + [2.117400646209717, -1.711801528930664, 
0.16513441503047943], + [1.5818675756454468, 0.44846096634864807, 0.033029891550540924] + ], + "expected_output": [ + [0.0, 1.0, 1.0], + [4.4833855628967285, 2.930264472961426, 0.027269374579191208], + [2.502305030822754, 0.20111723244190216, 0.0010909737320616841] + ], + "expected_grad": [ + [0.0, 2.0, -2.0], + [4.234801292419434, -3.423603057861328, 0.33026883006095886], + [3.1637351512908936, 0.8969219326972961, 0.06605978310108185] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.6130759119987488, 0.7763870358467102, -0.3029157221317291], + [-1.2753024101257324, -0.47575175762176514, 2.383944511413574] + ], + "expected_output": [ + [0.0, 1.0, 1.0], + [0.3758620619773865, 0.602776825428009, 0.09175793826580048], + [1.6263961791992188, 0.22633972764015198, 5.683191299438477] + ], + "expected_grad": [ + [0.0, 2.0, -2.0], + [1.2261518239974976, 1.5527740716934204, -0.6058314442634583], + [-2.550604820251465, -0.9515035152435303, 4.767889022827148] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.39997783303260803, -1.2039215564727783, -0.41975241899490356], + [-1.1928907632827759, -0.9350629448890686, 0.2138027846813202] + ], + "expected_output": [ + [0.0, 1.0, 1.0], + [0.15998226404190063, 1.4494271278381348, 0.17619208991527557], + [1.4229884147644043, 0.8743427395820618, 0.04571162909269333] + ], + "expected_grad": [ + [0.0, 2.0, -2.0], + [0.7999556660652161, -2.4078431129455566, -0.8395048379898071], + [-2.3857815265655518, -1.8701258897781372, 0.4276055693626404] + ] + } + ], + "abs": [ + { + "input": [ + [0.0, 1.0, -1.0], + [0.33552202582359314, 0.2469366192817688, 0.03243076428771019], + [0.40568798780441284, 1.618118405342102, 0.39315488934516907] + ], + "expected_output": [ + [0.0, 1.0, 1.0], + [0.33552202582359314, 0.2469366192817688, 0.03243076428771019], + [0.40568798780441284, 1.618118405342102, 0.39315488934516907] + ], + "expected_grad": [ + [0.0, 1.0, -1.0], + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + 
[-0.42791351675987244, -0.5006903409957886, -0.1306709349155426], + [0.9795637726783752, 0.5959771871566772, -1.4723340272903442] + ], + "expected_output": [ + [0.0, 1.0, 1.0], + [0.42791351675987244, 0.5006903409957886, 0.1306709349155426], + [0.9795637726783752, 0.5959771871566772, 1.4723340272903442] + ], + "expected_grad": [ + [0.0, 1.0, -1.0], + [-1.0, -1.0, -1.0], + [1.0, 1.0, -1.0] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.14468061923980713, 1.2818045616149902, -1.5952012538909912], + [-1.064806342124939, 0.10553700476884842, 1.9738837480545044] + ], + "expected_output": [ + [0.0, 1.0, 1.0], + [0.14468061923980713, 1.2818045616149902, 1.5952012538909912], + [1.064806342124939, 0.10553700476884842, 1.9738837480545044] + ], + "expected_grad": [ + [0.0, 1.0, -1.0], + [1.0, 1.0, -1.0], + [-1.0, 1.0, 1.0] + ] + } + ], + "sign": [ + { + "input": [ + [0.0, 1.0, -1.0], + [-0.1562519073486328, -0.6680524945259094, 1.2135050296783447], + [0.7923893332481384, -0.4400617480278015, 0.4995947778224945] + ], + "expected_output": [ + [0.0, 1.0, -1.0], + [-1.0, -1.0, 1.0], + [1.0, -1.0, 1.0] + ], + "expected_grad": [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.7485982179641724, -1.3375409841537476, 0.6449474096298218], + [0.9652354717254639, 1.0089688301086426, -0.033671289682388306] + ], + "expected_output": [ + [0.0, 1.0, -1.0], + [1.0, -1.0, 1.0], + [1.0, 1.0, -1.0] + ], + "expected_grad": [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-1.7460761070251465, -0.37421396374702454, -0.41171613335609436], + [-0.8997476696968079, 0.48207539319992065, 2.5150909423828125] + ], + "expected_output": [ + [0.0, 1.0, -1.0], + [-1.0, -1.0, -1.0], + [-1.0, 1.0, 1.0] + ], + "expected_grad": [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0] + ] + } + ], + "neg": [ + { + "input": [ + [0.0, 1.0, -1.0], + [-0.8023788928985596, 2.924564838409424, 
-0.7985216975212097], + [-0.5669130086898804, -0.026701264083385468, -1.5459672212600708] + ], + "expected_output": [ + [-0.0, -1.0, 1.0], + [0.8023788928985596, -2.924564838409424, 0.7985216975212097], + [0.5669130086898804, 0.026701264083385468, 1.5459672212600708] + ], + "expected_grad": [ + [-1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-0.1816582977771759, 0.4279658794403076, 0.25454989075660706], + [0.06617925316095352, -1.2704100608825684, 0.667417049407959] + ], + "expected_output": [ + [-0.0, -1.0, 1.0], + [0.1816582977771759, -0.4279658794403076, -0.25454989075660706], + [-0.06617925316095352, 1.2704100608825684, -0.667417049407959] + ], + "expected_grad": [ + [-1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-1.7517130374908447, -0.1309133768081665, 1.437826156616211], + [-0.154391348361969, -0.2853364050388336, -0.1389411836862564] + ], + "expected_output": [ + [-0.0, -1.0, 1.0], + [1.7517130374908447, 0.1309133768081665, -1.437826156616211], + [0.154391348361969, 0.2853364050388336, 0.1389411836862564] + ], + "expected_grad": [ + [-1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0] + ] + } + ], + "reciprocal": [ + { + "input": [ + [0.0, 1.0, -1.0], + [1.2561368942260742, 0.46269556879997253, -0.315053254365921], + [0.5482434034347534, 0.03692331537604332, -0.8128055930137634] + ], + "expected_output": [ + [Infinity, 1.0, -1.0], + [0.7960915565490723, 2.161248207092285, -3.1740665435791016], + [1.8240073919296265, 27.083158493041992, -1.2303065061569214] + ], + "expected_grad": [ + [-Infinity, -1.0, -1.0], + [-0.6337617635726929, -4.670993804931641, -10.074698448181152], + [-3.327003002166748, -733.4974975585938, -1.513654112815857] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [-0.5963875651359558, 0.7354843616485596, 0.0723138153553009], + [1.8586984872817993, -0.3944927752017975, 0.7613471746444702] + ], + 
"expected_output": [ + [Infinity, 1.0, -1.0], + [-1.6767619848251343, 1.3596482276916504, 13.82861614227295], + [0.5380108952522278, -2.534900665283203, 1.313461184501648] + ], + "expected_grad": [ + [-Infinity, -1.0, -1.0], + [-2.811530828475952, -1.8486433029174805, -191.23062133789062], + [-0.28945571184158325, -6.425721168518066, -1.7251802682876587] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.018282217904925346, -0.049032170325517654, 0.17692552506923676], + [0.8319899439811707, -2.008758783340454, -0.5569353103637695] + ], + "expected_output": [ + [Infinity, 1.0, -1.0], + [54.697959899902344, -20.394773483276367, 5.652095794677734], + [1.2019375562667847, -0.49781984090805054, -1.795540690422058] + ], + "expected_grad": [ + [-Infinity, -1.0, -1.0], + [-2991.86669921875, -415.94677734375, -31.946186065673828], + [-1.444653868675232, -0.24782459437847137, -3.223966360092163] + ] + } + ], + "sin": [ + { + "input": [ + [0.0, 1.0, -1.0], + [0.21935275197029114, 0.5447354912757874, -2.056748628616333], + [0.23443682491779327, -0.34069985151290894, 1.0171456336975098] + ], + "expected_output": [ + [0.0, 0.8414709568023682, -0.8414709568023682], + [0.21759793162345886, 0.5181918740272522, -0.8842305541038513], + [0.23229525983333588, -0.33414679765701294, 0.8506106734275818] + ], + "expected_grad": [ + [1.0, 0.5403023362159729, 0.5403023362159729], + [0.9760385155677795, 0.8552643656730652, -0.4670506417751312], + [0.9726453423500061, 0.9425210356712341, 0.5257960557937622] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [1.1056941747665405, -0.6886897087097168, 0.1491728127002716], + [-1.0009846687316895, -1.501810908317566, 0.00793869886547327] + ], + "expected_output": [ + [0.0, 0.8414709568023682, -0.8414709568023682], + [0.8937757611274719, -0.6355260610580444, 0.14862018823623657], + [-0.8420025706291199, -0.9976214170455933, 0.007938615046441555] + ], + "expected_grad": [ + [1.0, 0.5403023362159729, 0.5403023362159729], + [0.44851410388946533, 
0.7720794081687927, 0.9888943433761597], + [0.5394734740257263, 0.0689307153224945, 0.9999684691429138] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.24461546540260315, 1.4372375011444092, -0.706683874130249], + [1.3421580791473389, 0.5234800577163696, -0.26403290033340454] + ], + "expected_output": [ + [0.0, 0.8414709568023682, -0.8414709568023682], + [0.24218325316905975, 0.9910942912101746, -0.649315357208252], + [0.9739759564399719, 0.49989718198776245, -0.26097580790519714] + ], + "expected_grad": [ + [1.0, 0.5403023362159729, 0.5403023362159729], + [0.9702305197715759, 0.13316211104393005, 0.76051926612854], + [0.2266514152288437, 0.8660847544670105, 0.9653453230857849] + ] + } + ], + "cos": [ + { + "input": [ + [0.0, 1.0, -1.0], + [1.3824070692062378, -0.7471941709518433, -2.5696442127227783], + [0.10177755355834961, 1.4807320833206177, 0.29541370272636414] + ], + "expected_output": [ + [1.0, 0.5403023362159729, 0.5403023362159729], + [0.1872768998146057, 0.733598530292511, -0.840847909450531], + [0.9948251247406006, 0.08994252979755402, 0.9566817879676819] + ], + "expected_grad": [ + [-0.0, -0.8414709568023682, 0.8414709568023682], + [-0.9823071956634521, 0.6795830726623535, 0.541271448135376], + [-0.10160192847251892, -0.9959469437599182, -0.29113566875457764] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.6161983609199524, 0.0354895144701004, 1.124637246131897], + [-0.18692852556705475, 0.017339814454317093, -1.3771003484725952] + ], + "expected_output": [ + [1.0, 0.5403023362159729, 0.5403023362159729], + [0.8160814642906189, 0.9993703365325928, 0.4315038025379181], + [0.9825797080993652, 0.9998496770858765, 0.19248706102371216] + ], + "expected_grad": [ + [-0.0, -0.8414709568023682, 0.8414709568023682], + [-0.5779368877410889, -0.03548206388950348, -0.9021111130714417], + [0.18584181368350983, -0.01733894646167755, 0.9812995195388794] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.2835264801979065, -0.34251856803894043, 
-1.5208441019058228], + [0.11262339353561401, 0.061174698173999786, 0.8015474677085876] + ], + "expected_output": [ + [1.0, 0.5403023362159729, 0.5403023362159729], + [0.9600749015808105, 0.9419117569923401, 0.04993145540356636], + [0.9936646819114685, 0.9981294274330139, 0.6955958008766174] + ], + "expected_grad": [ + [-0.0, -0.8414709568023682, 0.8414709568023682], + [-0.2797430753707886, 0.3358604311943054, 0.9987526535987854], + [-0.11238545924425125, -0.06113654747605324, -0.7184333801269531] + ] + } + ], + "tan": [ + { + "input": [ + [0.0, 1.0, -1.0], + [-0.8692201972007751, 0.06136707961559296, 0.46790215373039246], + [0.43584781885147095, -0.28228023648262024, -1.2118933200836182] + ], + "expected_output": [ + [0.0, 1.5574077367782593, -1.5574077367782593], + [-1.1834511756896973, 0.0614442303776741, 0.5053295493125916], + [0.46571794152259827, -0.2900247275829315, -2.6655936241149902] + ], + "expected_grad": [ + [1.0, 3.425518751144409, 3.425518751144409], + [2.4005565643310547, 1.0037753582000732, 1.2553579807281494], + [1.216893196105957, 1.0841143131256104, 8.105389595031738] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.391975462436676, 1.06540846824646, -0.5663558840751648], + [1.2495359182357788, -0.8697574734687805, -0.25120875239372253] + ], + "expected_output": [ + [0.0, 1.5574077367782593, -1.5574077367782593], + [0.41336604952812195, 1.8072755336761475, -0.6358392238616943], + [3.004908561706543, -1.184741735458374, -0.2566298842430115] + ], + "expected_grad": [ + [1.0, 3.425518751144409, 3.425518751144409], + [1.1708714962005615, 4.266244888305664, 1.4042915105819702], + [10.029475212097168, 2.4036130905151367, 1.0658588409423828] + ] + }, + { + "input": [ + [0.0, 1.0, -1.0], + [0.2562393546104431, -0.5835933685302734, -1.3709070682525635], + [0.23866742849349976, 1.248494029045105, 0.05098257213830948] + ], + "expected_output": [ + [0.0, 1.5574077367782593, -1.5574077367782593], + [0.2619987726211548, -0.6603164076805115, 
-4.935962200164795], + [0.2433047890663147, 2.9944915771484375, 0.05102679133415222] + ], + "expected_grad": [ + [1.0, 3.425518751144409, 3.425518751144409], + [1.06864333152771, 1.4360177516937256, 25.363723754882812], + [1.059197187423706, 9.96697998046875, 1.0026037693023682] + ] + } + ] + }, + "binary": { + "add": [ + { + "input_x": [ + [0.0, 1.0, -1.0, -1.1133604049682617], + [0.0, 1.0, -1.0, 0.2570752799510956], + [0.6377718448638916, 0.15978877246379852, 1.7697970867156982, 0.6268176436424255], + [-0.4976125955581665, -0.18228091299533844, -0.21200162172317505, 0.8162168264389038] + ], + "input_y": [ + [0.0, 0.0, 1.294582724571228, 0.22267311811447144], + [1.0, 1.0, -0.8326117396354675, -0.8129478096961975], + [-1.0, -1.0, 0.7637977004051208, 1.1792222261428833], + [0.08301948755979538, 0.4220196604728699, 0.3418477773666382, -2.701582431793213] + ], + "expected_output": [ + [0.0, 1.0, 0.294582724571228, -0.8906872868537903], + [1.0, 2.0, -1.8326117992401123, -0.5558725595474243], + [-0.3622281551361084, -0.8402112126350403, 2.533594846725464, 1.806039810180664], + [-0.4145931005477905, 0.23973874747753143, 0.12984615564346313, -1.885365605354309] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -0.5786357522010803], + [0.0, 1.0, -1.0, 0.3255786597728729], + [0.043896086513996124, 1.4443233013153076, 0.23202891647815704, 0.5064983367919922], + [-0.9177634716033936, 0.42056551575660706, -0.368858277797699, -0.7696145176887512] + ], + "input_y": [ + [0.0, 0.0, -1.190559983253479, 0.7756073474884033], + [1.0, 1.0, -1.881459355354309, 0.5850948691368103], + [-1.0, -1.0, -0.8708454966545105, -0.6410972476005554], + [0.5373784899711609, 0.7817491888999939, 1.0477299690246582, -0.39480558037757874] + ], + "expected_output": [ 
+ [0.0, 1.0, -2.1905598640441895, 0.196971595287323], + [1.0, 2.0, -2.8814592361450195, 0.9106734991073608], + [-0.9561039209365845, 0.4443233013153076, -0.6388165950775146, -0.13459891080856323], + [-0.38038498163223267, 1.2023147344589233, 0.6788716912269592, -1.1644201278686523] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -0.29864323139190674], + [0.0, 1.0, -1.0, 0.881690502166748], + [-0.518220841884613, 0.83954256772995, 0.05852266773581505, -1.6682480573654175], + [-0.019966987892985344, -1.2131143808364868, 1.197104811668396, -0.7817718386650085] + ], + "input_y": [ + [0.0, 0.0, 0.2794382870197296, -0.5718191266059875], + [1.0, 1.0, -0.37528103590011597, 1.446703553199768], + [-1.0, -1.0, 0.12306158244609833, 0.8757511377334595], + [0.21162617206573486, 1.2271368503570557, -0.5722804665565491, -0.17070035636425018] + ], + "expected_output": [ + [0.0, 1.0, -0.7205617427825928, -0.8704623579978943], + [1.0, 2.0, -1.3752810955047607, 2.3283939361572266], + [-1.5182209014892578, -0.16045743227005005, 0.18158425390720367, -0.792496919631958], + [0.19165918231010437, 0.014022469520568848, 0.6248243451118469, -0.9524722099304199] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + } + ], + "sub": [ + { + "input_x": [ + [0.0, 1.0, -1.0, 1.541925072669983], + [0.0, 1.0, -1.0, -1.3860632181167603], + [-1.2519514560699463, 3.0250484943389893, 1.3462589979171753, 0.8556069731712341], + [-1.525596261024475, 1.2538059949874878, -0.684340238571167, -2.121647357940674] + ], + "input_y": [ + [0.0, 0.0, -0.4809381365776062, 
-0.9903622269630432], + [1.0, 1.0, -1.0486466884613037, 1.7823078632354736], + [-1.0, -1.0, 0.9993040561676025, 1.2751853466033936], + [1.2704023122787476, -0.07503493875265121, -2.2949612140655518, -1.3630598783493042] + ], + "expected_output": [ + [0.0, 1.0, -0.5190618634223938, 2.532287359237671], + [-1.0, 0.0, 0.04864668846130371, -3.1683712005615234], + [-0.2519514560699463, 4.02504825592041, 0.34695494174957275, -0.4195783734321594], + [-2.7959985733032227, 1.328840970993042, 1.6106209754943848, -0.7585874795913696] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, 0.3547132909297943], + [0.0, 1.0, -1.0, 0.22359032928943634], + [0.34287506341934204, -1.4590637683868408, -1.4936561584472656, -0.22138521075248718], + [0.12097510695457458, 0.5535293817520142, -1.9203240871429443, -0.09706467390060425] + ], + "input_y": [ + [0.0, 0.0, -0.8097057938575745, -1.4298604726791382], + [1.0, 1.0, 1.8783389329910278, 0.18154054880142212], + [-1.0, -1.0, -1.2370758056640625, -2.301875114440918], + [0.6071491837501526, 0.034609485417604446, -0.5123788118362427, 0.5574127435684204] + ], + "expected_output": [ + [0.0, 1.0, -0.19029420614242554, 1.7845737934112549], + [-1.0, 0.0, -2.8783388137817383, 0.04204978048801422], + [1.3428750038146973, -0.4590637683868408, -0.2565803527832031, 2.0804898738861084], + [-0.486174076795578, 0.5189198851585388, -1.4079452753067017, -0.6544774174690247] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, 2.8543806076049805], + [0.0, 
1.0, -1.0, -0.18887265026569366], + [0.0696544423699379, -1.6034338474273682, -0.42982181906700134, 0.5761587619781494], + [0.682919979095459, -0.035719022154808044, -0.8881044387817383, -0.5891188979148865] + ], + "input_y": [ + [0.0, 0.0, -0.7190185785293579, -0.38583096861839294], + [1.0, 1.0, 0.09291388839483261, -0.7843634486198425], + [-1.0, -1.0, -0.7668924331665039, -0.9494866728782654], + [0.58314448595047, 0.033020101487636566, -1.2597709894180298, -0.7298099398612976] + ], + "expected_output": [ + [0.0, 1.0, -0.2809814214706421, 3.2402114868164062], + [-1.0, 0.0, -1.0929138660430908, 0.5954908132553101], + [1.0696544647216797, -0.6034338474273682, 0.33707061409950256, 1.5256454944610596], + [0.09977549314498901, -0.06873912364244461, 0.3716665506362915, 0.14069104194641113] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0], + [-1.0, -1.0, -1.0, -1.0] + ] + } + ], + "mul": [ + { + "input_x": [ + [0.0, 1.0, -1.0, 1.9025356769561768], + [0.0, 1.0, -1.0, 0.06751081347465515], + [0.7401772737503052, 1.4161995649337769, 0.6833979487419128, -0.13825182616710663], + [0.9863895177841187, -0.3892551064491272, 0.6138074398040771, -0.2786312699317932] + ], + "input_y": [ + [0.0, 0.0, 0.19955870509147644, -1.2461947202682495], + [1.0, 1.0, -0.5218439698219299, 0.8302035331726074], + [-1.0, -1.0, -1.2894343137741089, 1.0762810707092285], + [-2.2878637313842773, -0.48340824246406555, 0.5599985718727112, -1.0602329969406128] + ], + "expected_output": [ + [0.0, 0.0, -0.19955870509147644, -2.3709299564361572], + [0.0, 1.0, 0.5218439698219299, 0.0560477152466774], + [-0.7401772737503052, -1.4161995649337769, -0.8811967372894287, -0.14879782497882843], + [-2.2567248344421387, 0.18816912174224854, 0.3437312841415405, 0.2954140603542328] + ], + "expected_grad_x": [ + [0.0, 0.0, 
0.19955870509147644, -1.2461947202682495], + [1.0, 1.0, -0.5218439698219299, 0.8302035331726074], + [-1.0, -1.0, -1.2894343137741089, 1.0762810707092285], + [-2.2878637313842773, -0.48340824246406555, 0.5599985718727112, -1.0602329969406128] + ], + "expected_grad_y": [ + [0.0, 1.0, -1.0, 1.9025356769561768], + [0.0, 1.0, -1.0, 0.06751081347465515], + [0.7401772737503052, 1.4161995649337769, 0.6833979487419128, -0.13825182616710663], + [0.9863895177841187, -0.3892551064491272, 0.6138074398040771, -0.2786312699317932] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -0.7554785013198853], + [0.0, 1.0, -1.0, -0.008815222419798374], + [-1.6514869928359985, 2.1477253437042236, -0.6604134440422058, 0.11352583765983582], + [0.35569247603416443, 1.2056455612182617, 1.3690308332443237, -0.6949581503868103] + ], + "input_y": [ + [0.0, 0.0, -1.2322070598602295, -0.598077118396759], + [1.0, 1.0, -0.23776796460151672, -0.1013922467827797], + [-1.0, -1.0, -0.595004141330719, 0.3926331400871277], + [-1.4313503503799438, -0.039798520505428314, -0.5003415942192078, 0.2930373251438141] + ], + "expected_output": [ + [0.0, 0.0, 1.2322070598602295, 0.45183441042900085], + [0.0, 1.0, 0.23776796460151672, 0.0008937952225096524], + [1.6514869928359985, -2.1477253437042236, 0.3929487466812134, 0.044574007391929626], + [-0.509120523929596, -0.04798290878534317, -0.6849830746650696, -0.2036486715078354] + ], + "expected_grad_x": [ + [0.0, 0.0, -1.2322070598602295, -0.598077118396759], + [1.0, 1.0, -0.23776796460151672, -0.1013922467827797], + [-1.0, -1.0, -0.595004141330719, 0.3926331400871277], + [-1.4313503503799438, -0.039798520505428314, -0.5003415942192078, 0.2930373251438141] + ], + "expected_grad_y": [ + [0.0, 1.0, -1.0, -0.7554785013198853], + [0.0, 1.0, -1.0, -0.008815222419798374], + [-1.6514869928359985, 2.1477253437042236, -0.6604134440422058, 0.11352583765983582], + [0.35569247603416443, 1.2056455612182617, 1.3690308332443237, -0.6949581503868103] + ] + }, + { + "input_x": [ + 
[0.0, 1.0, -1.0, -0.13413065671920776], + [0.0, 1.0, -1.0, -1.5807840824127197], + [-0.15693345665931702, 1.138344645500183, -0.2505214214324951, 1.6704555749893188], + [1.8887592554092407, -0.7538138628005981, -1.7142589092254639, -2.2069785594940186] + ], + "input_y": [ + [0.0, 0.0, 0.49674081802368164, -2.369169235229492], + [1.0, 1.0, -0.8810284733772278, 0.34835284948349], + [-1.0, -1.0, -0.9705761075019836, 1.4723693132400513], + [-1.3775256872177124, -1.204437255859375, -0.48241984844207764, -0.8164191246032715] + ], + "expected_output": [ + [0.0, 0.0, -0.49674081802368164, 0.31777822971343994], + [0.0, 1.0, 0.8810284733772278, -0.5506706237792969], + [0.15693345665931702, -1.138344645500183, 0.24315010011196136, 2.4595274925231934], + [-2.6018145084381104, 0.9079214930534363, 0.8269925117492676, 1.8018195629119873] + ], + "expected_grad_x": [ + [0.0, 0.0, 0.49674081802368164, -2.369169235229492], + [1.0, 1.0, -0.8810284733772278, 0.34835284948349], + [-1.0, -1.0, -0.9705761075019836, 1.4723693132400513], + [-1.3775256872177124, -1.204437255859375, -0.48241984844207764, -0.8164191246032715] + ], + "expected_grad_y": [ + [0.0, 1.0, -1.0, -0.13413065671920776], + [0.0, 1.0, -1.0, -1.5807840824127197], + [-0.15693345665931702, 1.138344645500183, -0.2505214214324951, 1.6704555749893188], + [1.8887592554092407, -0.7538138628005981, -1.7142589092254639, -2.2069785594940186] + ] + } + ], + "div": [ + { + "input_x": [ + [0.0, 1.0, -1.0, -0.5379461050033569], + [0.0, 1.0, -1.0, -0.3353029191493988], + [0.9199973940849304, -0.3787555396556854, -1.5597758293151855, -0.8009540438652039], + [0.35879161953926086, 1.286241054534912, 0.8211396932601929, 0.9001851081848145] + ], + "input_y": [ + [0.0, 0.0, 1.875388741493225, 1.1042989492416382], + [1.0, 1.0, 0.23035314679145813, -0.36578282713890076], + [-1.0, -1.0, 1.5921050310134888, -0.41619211435317993], + [-0.7509436011314392, -1.4861301183700562, -0.03333820775151253, 1.199822187423706] + ], + "expected_output": [ + 
[NaN, Infinity, -0.5332227945327759, -0.48713812232017517], + [0.0, 1.0, -4.341160774230957, 0.9166721105575562], + [-0.9199973940849304, 0.3787555396556854, -0.9796940684318542, 1.9244815111160278], + [-0.47778770327568054, -0.8654969334602356, -24.63058853149414, 0.7502654194831848] + ], + "expected_grad_x": [ + [Infinity, Infinity, 0.5332227945327759, 0.9055519104003906], + [1.0, 1.0, 4.341160774230957, -2.7338626384735107], + [-1.0, -1.0, 0.628099262714386, -2.4027364253997803], + [-1.331657886505127, -0.6728885769844055, -29.99561309814453, 0.833456814289093] + ], + "expected_grad_y": [ + [NaN, -Infinity, 0.28432655334472656, 0.44112884998321533], + [-0.0, -1.0, 18.84567642211914, 2.5060555934906006], + [-0.9199973940849304, 0.3787555396556854, 0.6153451204299927, 4.624022006988525], + [-0.6362497806549072, -0.5823830366134644, -738.8096313476562, -0.6253138184547424] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, 0.32332202792167664], + [0.0, 1.0, -1.0, -0.3372461199760437], + [-0.08114803582429886, 1.2586976289749146, 0.8691263794898987, -0.9609367251396179], + [0.3896495997905731, 0.378065288066864, -0.13722474873065948, 1.1604512929916382] + ], + "input_y": [ + [0.0, 0.0, -0.22963792085647583, -0.40709349513053894], + [1.0, 1.0, 0.8916962146759033, 0.7001703381538391], + [-1.0, -1.0, 0.9187757968902588, 0.6662830114364624], + [0.22452101111412048, -0.9745140671730042, 0.9816727638244629, 0.5836943984031677] + ], + "expected_output": [ + [NaN, Infinity, 4.354681491851807, -0.7942205667495728], + [0.0, 1.0, -1.1214581727981567, -0.48166295886039734], + [0.08114803582429886, -1.2586976289749146, 0.9459613561630249, -1.4422351121902466], + [1.7354705333709717, -0.38795262575149536, -0.13978664577007294, 1.9881144762039185] + ], + "expected_grad_x": [ + [Infinity, Infinity, -4.354681491851807, -2.4564380645751953], + [1.0, 1.0, 1.1214581727981567, 1.4282238483428955], + [-1.0, -1.0, 1.0884047746658325, 1.5008636713027954], + [4.453926086425781, 
-1.026152491569519, 1.0186693668365479, 1.713225245475769] + ], + "expected_grad_y": [ + [NaN, -Infinity, 18.9632511138916, -1.950953722000122], + [-0.0, -1.0, 1.257668375968933, 0.6879225373268127], + [0.08114803582429886, -1.2586976289749146, -1.0295889377593994, 2.1645984649658203], + [-7.7296576499938965, -0.39809852838516235, 0.14239637553691864, -3.406087875366211] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -0.7911050319671631], + [0.0, 1.0, -1.0, -0.17465536296367645], + [0.34005025029182434, 1.4557304382324219, -0.3461014926433563, -0.2633814215660095], + [1.5014268159866333, -0.033501043915748596, 1.1346064805984497, 0.14365693926811218] + ], + "input_y": [ + [0.0, 0.0, 1.3061575889587402, -0.9275763630867004], + [1.0, 1.0, 1.2907683849334717, 1.5468223094940186], + [-1.0, -1.0, -0.6493158936500549, 0.45214036107063293], + [1.2459663152694702, -0.8647340536117554, -0.8475469946861267, -0.02352304570376873] + ], + "expected_output": [ + [NaN, Infinity, -0.7656043767929077, 0.8528732061386108], + [0.0, 1.0, -0.7747323513031006, -0.1129123643040657], + [-0.34005025029182434, -1.4557304382324219, 0.5330248475074768, -0.582521378993988], + [1.2050299644470215, 0.03874144330620766, -1.338694453239441, -6.107072353363037] + ], + "expected_grad_x": [ + [Infinity, Infinity, 0.7656043767929077, -1.0780783891677856], + [1.0, 1.0, 0.7747323513031006, 0.6464866399765015], + [-1.0, -1.0, -1.5400824546813965, 2.211702585220337], + [0.8025898933410645, -1.156424880027771, -1.1798756122589111, -42.51150131225586] + ], + "expected_grad_y": [ + [NaN, -Infinity, 0.5861501097679138, 0.9194641709327698], + [-0.0, -1.0, 0.6002101898193359, 0.0729963406920433], + [-0.34005025029182434, -1.4557304382324219, 0.8209022283554077, 1.288364052772522], + [-0.9671449065208435, 0.04480157047510147, -1.5794929265975952, -259.6208190917969] + ] + } + ], + "pow": [ + { + "input_x": [ + [0.0, 1.0, -1.0, -0.35899198055267334], + [0.0, 1.0, -1.0, 0.013965161517262459], + 
[-1.3923954963684082, 0.540465772151947, 0.4350730776786804, -2.2717032432556152], + [0.06991618871688843, 0.5178828239440918, -0.3457261919975281, 1.3411089181900024] + ], + "input_y": [ + [0.0, 0.0, -0.5522539615631104, -0.09342008084058762], + [1.0, 1.0, -0.5081791877746582, 1.1477830410003662], + [-1.0, -1.0, 0.9821351766586304, 0.22690092027187347], + [1.563033103942871, 0.5983186960220337, -0.5407329201698303, 0.7841619849205017] + ], + "expected_output": [ + [1.0, 1.0, NaN, NaN], + [0.0, 1.0, NaN, 0.007428741082549095], + [-0.7181867361068726, 1.8502559661865234, 0.44159001111984253, NaN], + [0.01563280075788498, 0.6745581030845642, NaN, 1.2587876319885254] + ], + "expected_grad_x": [ + [0.0, 0.0, NaN, NaN], + [1.0, 1.0, NaN, 0.6105610132217407], + [-0.515792191028595, -3.4234468936920166, 0.9968464374542236, NaN], + [0.3494839370250702, 0.7793282866477966, NaN, 0.7360277771949768] + ], + "expected_grad_y": [ + [0.0, 0.0, NaN, NaN], + [0.0, 0.0, NaN, -0.0317295640707016], + [NaN, -1.1385067701339722, -0.3675094544887543, NaN], + [-0.041590411216020584, -0.4438634514808655, NaN, 0.3694501519203186] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, 0.7011352777481079], + [0.0, 1.0, -1.0, 0.46809425950050354], + [0.1417587399482727, 1.1084681749343872, 0.5544233322143555, 1.5817502737045288], + [-0.5585249066352844, 0.6636945605278015, -1.7024588584899902, 1.669644832611084] + ], + "input_y": [ + [0.0, 0.0, -1.6634936332702637, -0.7650561332702637], + [1.0, 1.0, -1.1787039041519165, -2.1400885581970215], + [-1.0, -1.0, -0.07032525539398193, 0.27448296546936035], + [0.5569454431533813, -0.23220781981945038, -0.05974622070789337, -1.666461706161499] + ], + "expected_output": [ + [1.0, 1.0, NaN, 1.3121100664138794], + [0.0, 1.0, NaN, 5.075933456420898], + [7.054238796234131, 0.9021458625793457, 1.0423519611358643, 1.1341224908828735], + [NaN, 1.0998674631118774, NaN, 0.4256037771701813] + ], + "expected_grad_x": [ + [0.0, 0.0, NaN, -1.4317320585250854], + [1.0, 1.0, 
NaN, -23.206750869750977], + [-49.76228332519531, -0.8138672113418579, -0.13221606612205505, 0.19680559635162354], + [NaN, -0.38481229543685913, NaN, -0.4247923791408539] + ], + "expected_grad_y": [ + [0.0, 0.0, NaN, -0.4658704996109009], + [0.0, 0.0, NaN, -3.8530678749084473], + [-13.781363487243652, 0.09290211647748947, -0.6148070693016052, 0.5200314521789551], + [NaN, -0.4508722424507141, NaN, 0.21816913783550262] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -1.5196866989135742], + [0.0, 1.0, -1.0, 0.9274609088897705], + [0.7894347310066223, 0.7824702858924866, -0.06465863436460495, -0.0002302070497535169], + [0.569308340549469, 0.7476243376731873, 2.1336774826049805, 0.5014525055885315] + ], + "input_y": [ + [0.0, 0.0, 1.4614392518997192, 1.0566132068634033], + [1.0, 1.0, -0.2809409499168396, -0.3604622483253479], + [-1.0, -1.0, -1.7659958600997925, -2.5379507541656494], + [-0.03343662992119789, -1.7016695737838745, 0.5863364338874817, -1.7526601552963257] + ], + "expected_output": [ + [1.0, 1.0, NaN, NaN], + [0.0, 1.0, NaN, 1.02751624584198], + [1.2667291164398193, 1.2780038118362427, NaN, NaN], + [1.0190144777297974, 1.640397310256958, 1.5594813823699951, 3.3527045249938965] + ], + "expected_grad_x": [ + [0.0, 0.0, NaN, NaN], + [1.0, 1.0, NaN, -0.39934927225112915], + [-1.6046028137207031, -1.6332937479019165, NaN, NaN], + [-0.05984878167510033, -3.733712673187256, 0.42854681611061096, -11.71826171875] + ], + "expected_grad_y": [ + [0.0, 0.0, NaN, NaN], + [0.0, 0.0, NaN, -0.07737673819065094], + [-0.29950305819511414, -0.3134934902191162, NaN, NaN], + [-0.5740445852279663, -0.4771172106266022, 1.1818482875823975, -2.314192295074463] + ] + } + ], + "maximum": [ + { + "input_x": [ + [0.0, 1.0, -1.0, 0.2491273283958435], + [0.0, 1.0, -1.0, 0.0604986697435379], + [-0.18495284020900726, -1.0380902290344238, -0.10130416601896286, -0.9271824359893799], + [0.734416127204895, 0.030971217900514603, -0.5865293741226196, -0.3154546320438385] + ], + "input_y": [ + [0.0, 
0.0, 1.2309634685516357, 1.2286686897277832], + [1.0, 1.0, -1.2784851789474487, -1.269239068031311], + [-1.0, -1.0, 1.8989076614379883, -0.04056643322110176], + [0.6467097401618958, -2.081273078918457, -0.9303572773933411, -1.3949681520462036] + ], + "expected_output": [ + [0.0, 1.0, 1.2309634685516357, 1.2286686897277832], + [1.0, 1.0, -1.0, 0.0604986697435379], + [-0.18495284020900726, -1.0, 1.8989076614379883, -0.04056643322110176], + [0.734416127204895, 0.030971217900514603, -0.5865293741226196, -0.3154546320438385] + ], + "expected_grad_x": [ + [0.5, 1.0, 0.0, 0.0], + [0.0, 0.5, 1.0, 1.0], + [1.0, 0.0, 0.0, 0.0], + [1.0, 1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [0.5, 0.0, 1.0, 1.0], + [1.0, 0.5, 0.0, 0.0], + [0.0, 1.0, 1.0, 1.0], + [0.0, 0.0, 0.0, 0.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -0.10754015296697617], + [0.0, 1.0, -1.0, 0.3934462368488312], + [0.9626035690307617, -1.1049346923828125, -0.7909473180770874, -0.21609316766262054], + [0.8574033975601196, 1.145952820777893, -0.11311652511358261, 1.409318447113037] + ], + "input_y": [ + [0.0, 0.0, 1.6020095348358154, -1.0371782779693604], + [1.0, 1.0, 0.11150163412094116, 1.7073947191238403], + [-1.0, -1.0, -0.06464217603206635, 3.445625066757202], + [0.36726734042167664, -0.39192530512809753, 0.1605725735425949, 2.2373695373535156] + ], + "expected_output": [ + [0.0, 1.0, 1.6020095348358154, -0.10754015296697617], + [1.0, 1.0, 0.11150163412094116, 1.7073947191238403], + [0.9626035690307617, -1.0, -0.06464217603206635, 3.445625066757202], + [0.8574033975601196, 1.145952820777893, 0.1605725735425949, 2.2373695373535156] + ], + "expected_grad_x": [ + [0.5, 1.0, 0.0, 1.0], + [0.0, 0.5, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0], + [1.0, 1.0, 0.0, 0.0] + ], + "expected_grad_y": [ + [0.5, 0.0, 1.0, 0.0], + [1.0, 0.5, 1.0, 1.0], + [0.0, 1.0, 1.0, 1.0], + [0.0, 0.0, 1.0, 1.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, 0.32681646943092346], + [0.0, 1.0, -1.0, -0.2612513303756714], + [-1.3136197328567505, 
-0.6061143279075623, 0.6449755430221558, -0.24771355092525482], + [-0.9767715930938721, -0.443967342376709, 0.7373694777488708, -0.13394701480865479] + ], + "input_y": [ + [0.0, 0.0, 0.25832492113113403, -2.4340736865997314], + [1.0, 1.0, -2.0726318359375, -2.1240861415863037], + [-1.0, -1.0, 0.4093967378139496, -0.5840954780578613], + [1.0092873573303223, 1.0439283847808838, 0.29792049527168274, 0.8381712436676025] + ], + "expected_output": [ + [0.0, 1.0, 0.25832492113113403, 0.32681646943092346], + [1.0, 1.0, -1.0, -0.2612513303756714], + [-1.0, -0.6061143279075623, 0.6449755430221558, -0.24771355092525482], + [1.0092873573303223, 1.0439283847808838, 0.7373694777488708, 0.8381712436676025] + ], + "expected_grad_x": [ + [0.5, 1.0, 0.0, 1.0], + [0.0, 0.5, 1.0, 1.0], + [0.0, 1.0, 1.0, 1.0], + [0.0, 0.0, 1.0, 0.0] + ], + "expected_grad_y": [ + [0.5, 0.0, 1.0, 0.0], + [1.0, 0.5, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0], + [1.0, 1.0, 0.0, 1.0] + ] + } + ], + "minimum": [ + { + "input_x": [ + [0.0, 1.0, -1.0, 1.4483332633972168], + [0.0, 1.0, -1.0, -0.8552408218383789], + [-0.25843867659568787, -0.7501540780067444, 1.2354754209518433, 1.0141247510910034], + [-0.6535475254058838, 1.4623208045959473, 0.1203368678689003, 0.7514159083366394] + ], + "input_y": [ + [0.0, 0.0, -0.8768263459205627, -1.5509816408157349], + [1.0, 1.0, -0.20693671703338623, 0.44715985655784607], + [-1.0, -1.0, 0.21129246056079865, 1.4860185384750366], + [2.121837854385376, 0.4178215563297272, 0.44609254598617554, -0.02666892670094967] + ], + "expected_output": [ + [0.0, 0.0, -1.0, -1.5509816408157349], + [0.0, 1.0, -1.0, -0.8552408218383789], + [-1.0, -1.0, 0.21129246056079865, 1.0141247510910034], + [-0.6535475254058838, 0.4178215563297272, 0.1203368678689003, -0.02666892670094967] + ], + "expected_grad_x": [ + [0.5, 0.0, 1.0, 0.0], + [1.0, 0.5, 1.0, 1.0], + [0.0, 0.0, 0.0, 1.0], + [1.0, 0.0, 1.0, 0.0] + ], + "expected_grad_y": [ + [0.5, 1.0, 0.0, 1.0], + [0.0, 0.5, 0.0, 0.0], + [1.0, 1.0, 1.0, 0.0], + 
[0.0, 1.0, 0.0, 1.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, 1.0925219058990479], + [0.0, 1.0, -1.0, -0.24391917884349823], + [-0.783242404460907, -0.26778313517570496, 1.5684525966644287, -0.283514142036438], + [-1.465786099433899, -0.06292983889579773, -2.527074098587036, 0.47731438279151917] + ], + "input_y": [ + [0.0, 0.0, -0.06058019772171974, -0.5304896831512451], + [1.0, 1.0, 1.6892662048339844, -0.6297198534011841], + [-1.0, -1.0, -1.4458993673324585, -0.31466683745384216], + [-1.242173194885254, -0.3340345025062561, -1.5728429555892944, 2.810397148132324] + ], + "expected_output": [ + [0.0, 0.0, -1.0, -0.5304896831512451], + [0.0, 1.0, -1.0, -0.6297198534011841], + [-1.0, -1.0, -1.4458993673324585, -0.31466683745384216], + [-1.465786099433899, -0.3340345025062561, -2.527074098587036, 0.47731438279151917] + ], + "expected_grad_x": [ + [0.5, 0.0, 1.0, 0.0], + [1.0, 0.5, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [0.5, 1.0, 0.0, 1.0], + [0.0, 0.5, 0.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [0.0, 1.0, 0.0, 0.0] + ] + }, + { + "input_x": [ + [0.0, 1.0, -1.0, -2.0370566844940186], + [0.0, 1.0, -1.0, 0.9563636779785156], + [-0.8331825137138367, -0.47921040654182434, 0.2998451292514801, 0.721377432346344], + [0.7283791899681091, -0.34462252259254456, 0.4790245592594147, 0.024238986894488335] + ], + "input_y": [ + [0.0, 0.0, 0.43399056792259216, 1.1234275102615356], + [1.0, 1.0, 0.2529439628124237, 0.12658123672008514], + [-1.0, -1.0, 0.825295627117157, 2.268347978591919], + [-0.06611745059490204, -1.0740467309951782, 1.3753548860549927, -0.3824302554130554] + ], + "expected_output": [ + [0.0, 0.0, -1.0, -2.0370566844940186], + [0.0, 1.0, -1.0, 0.12658123672008514], + [-1.0, -1.0, 0.2998451292514801, 0.721377432346344], + [-0.06611745059490204, -1.0740467309951782, 0.4790245592594147, -0.3824302554130554] + ], + "expected_grad_x": [ + [0.5, 0.0, 1.0, 1.0], + [1.0, 0.5, 1.0, 0.0], + [0.0, 0.0, 1.0, 1.0], + [0.0, 0.0, 
1.0, 0.0] + ], + "expected_grad_y": [ + [0.5, 1.0, 0.0, 0.0], + [0.0, 0.5, 0.0, 1.0], + [1.0, 1.0, 0.0, 0.0], + [1.0, 1.0, 0.0, 1.0] + ] + } + ] + }, + "broadcasting": [ + { + "test_name": "broadcast_add_(2, 3)_and_(2, 3)", + "op_name": "add", + "input_x": [ + [-0.017705317586660385, -0.843208909034729, 0.06224742531776428], + [0.4080433249473572, -1.2516316175460815, -1.2717373371124268] + ], + "input_y": [ + [0.8055283427238464, 1.0920584201812744, 0.8560135960578918], + [1.5192112922668457, 0.44675812125205994, -0.2566242218017578] + ], + "expected_output": [ + [0.7878230214118958, 0.2488495111465454, 0.9182610511779785], + [1.9272546768188477, -0.8048734664916992, -1.5283615589141846] + ], + "expected_grad_x": [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ], + "expected_grad_y": [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ] + }, + { + "test_name": "broadcast_mul_(2, 3)_and_(2, 3)", + "op_name": "mul", + "input_x": [ + [1.7753183841705322, -0.7927579879760742, -1.6142888069152832], + [0.31103116273880005, 1.7946441173553467, 3.19868803024292] + ], + "input_y": [ + [0.2567588686943054, 0.8679211735725403, -1.1851390600204468], + [0.7940642833709717, 0.19724640250205994, 0.622898280620575] + ], + "expected_output": [ + [0.4558287262916565, -0.6880514621734619, 1.9131567478179932], + [0.2469787299633026, 0.35398709774017334, 1.9924572706222534] + ], + "expected_grad_x": [ + [0.2567588686943054, 0.8679211735725403, -1.1851390600204468], + [0.7940642833709717, 0.19724640250205994, 0.622898280620575] + ], + "expected_grad_y": [ + [1.7753183841705322, -0.7927579879760742, -1.6142888069152832], + [0.31103116273880005, 1.7946441173553467, 3.19868803024292] + ] + }, + { + "test_name": "broadcast_add_scalar_and_(2, 2)", + "op_name": "add", + "input_x": 1.5, + "input_y": [ + [1.186416506767273, 1.3099017143249512], + [-2.1073341369628906, -0.22107578814029694] + ], + "expected_output": [ + [2.6864166259765625, 2.809901714324951], + [-0.6073341369628906, 1.2789242267608643] + ], + 
"expected_grad_x": 4.0, + "expected_grad_y": [ + [1.0, 1.0], + [1.0, 1.0] + ] + }, + { + "test_name": "broadcast_mul_scalar_and_(2, 2)", + "op_name": "mul", + "input_x": 1.5, + "input_y": [ + [0.9579378366470337, -1.3392482995986938], + [-1.3702017068862915, -0.32176926732063293] + ], + "expected_output": [ + [1.4369068145751953, -2.0088725090026855], + [-2.055302619934082, -0.4826539158821106] + ], + "expected_grad_x": -2.0732815265655518, + "expected_grad_y": [ + [1.5, 1.5], + [1.5, 1.5] + ] + }, + { + "test_name": "broadcast_add_(2, 3, 4, 1)_and_(3, 1, 1)", + "op_name": "add", + "input_x": [ + [ + [ + [-1.9291036128997803], + [0.03497670218348503], + [-0.4833625555038452], + [-1.2260730266571045] + ], + [ + [-1.6395642757415771], + [0.2299439013004303], + [-0.44963574409484863], + [-0.40768420696258545] + ], + [ + [-0.011143443174660206], + [1.8151899576187134], + [0.6066091060638428], + [0.5963938236236572] + ] + ], + [ + [ + [0.5361921191215515], + [1.2350000143051147], + [-0.21214154362678528], + [1.387345790863037] + ], + [ + [-0.6384463906288147], + [0.737216591835022], + [1.1485421657562256], + [0.725890040397644] + ], + [ + [-0.29366371035575867], + [-0.6286743879318237], + [-0.04226749762892723], + [-0.27004849910736084] + ] + ] + ], + "input_y": [ + [ + [0.1034090593457222] + ], + [ + [-0.5241618156433105] + ], + [ + [-0.8587943315505981] + ] + ], + "expected_output": [ + [ + [ + [-1.8256945610046387], + [0.13838575780391693], + [-0.3799535036087036], + [-1.122663974761963] + ], + [ + [-2.1637260913848877], + [-0.29421791434288025], + [-0.9737975597381592], + [-0.931846022605896] + ], + [ + [-0.8699377775192261], + [0.9563956260681152], + [-0.25218522548675537], + [-0.2624005079269409] + ] + ], + [ + [ + [0.6396011710166931], + [1.3384090662002563], + [-0.10873248428106308], + [1.4907548427581787] + ], + [ + [-1.1626081466674805], + [0.21305477619171143], + [0.624380350112915], + [0.2017282247543335] + ], + [ + [-1.1524580717086792], + 
[-1.4874687194824219], + [-0.9010618329048157], + [-1.128842830657959] + ] + ] + ], + "expected_grad_x": [ + [ + [ + [1.0], + [1.0], + [1.0], + [1.0] + ], + [ + [1.0], + [1.0], + [1.0], + [1.0] + ], + [ + [1.0], + [1.0], + [1.0], + [1.0] + ] + ], + [ + [ + [1.0], + [1.0], + [1.0], + [1.0] + ], + [ + [1.0], + [1.0], + [1.0], + [1.0] + ], + [ + [1.0], + [1.0], + [1.0], + [1.0] + ] + ] + ], + "expected_grad_y": [ + [ + [8.0] + ], + [ + [8.0] + ], + [ + [8.0] + ] + ] + }, + { + "test_name": "broadcast_mul_(2, 3, 4, 1)_and_(3, 1, 1)", + "op_name": "mul", + "input_x": [ + [ + [ + [2.8818862438201904], + [-1.1672022342681885], + [1.9413354396820068], + [-1.163609266281128] + ], + [ + [-1.5966553688049316], + [0.08320564776659012], + [-0.9222075343132019], + [-0.3711417317390442] + ], + [ + [0.1490848809480667], + [-0.2603190541267395], + [-0.7125067710876465], + [-0.7577925324440002] + ] + ], + [ + [ + [-0.6056803464889526], + [-0.001647569122724235], + [-0.48284876346588135], + [-1.0801341533660889] + ], + [ + [0.9042091369628906], + [-0.03959815204143524], + [0.3362662196159363], + [-0.9519665837287903] + ], + [ + [-0.08652620017528534], + [0.832058310508728], + [0.9070028066635132], + [0.03912770003080368] + ] + ] + ], + "input_y": [ + [ + [-1.5896711349487305] + ], + [ + [-0.040549930185079575] + ], + [ + [1.9009761810302734] + ] + ], + "expected_output": [ + [ + [ + [-4.58125114440918], + [1.855467677116394], + [-3.0860848426818848], + [1.849756121635437] + ], + [ + [0.0647442638874054], + [-0.0033739833161234856], + [0.037395451217889786], + [0.015049771405756474] + ], + [ + [0.2834067940711975], + [-0.49486032128334045], + [-1.3544584512710571], + [-1.4405455589294434] + ] + ], + [ + [ + [0.9628325700759888], + [0.0026190930511802435], + [0.7675707340240479], + [1.7170580625534058] + ], + [ + [-0.03666561841964722], + [0.001605702331289649], + [-0.013635572046041489], + [0.03860217705368996] + ], + [ + [-0.16448424756526947], + [1.5817229747772217], + 
[1.7241907119750977], + [0.07438082247972488] + ] + ] + ], + "expected_grad_x": [ + [ + [ + [-1.5896711349487305], + [-1.5896711349487305], + [-1.5896711349487305], + [-1.5896711349487305] + ], + [ + [-0.040549930185079575], + [-0.040549930185079575], + [-0.040549930185079575], + [-0.040549930185079575] + ], + [ + [1.9009761810302734], + [1.9009761810302734], + [1.9009761810302734], + [1.9009761810302734] + ] + ], + [ + [ + [-1.5896711349487305], + [-1.5896711349487305], + [-1.5896711349487305], + [-1.5896711349487305] + ], + [ + [-0.040549930185079575], + [-0.040549930185079575], + [-0.040549930185079575], + [-0.040549930185079575] + ], + [ + [1.9009761810302734], + [1.9009761810302734], + [1.9009761810302734], + [1.9009761810302734] + ] + ] + ], + "expected_grad_y": [ + [ + [0.3220992088317871] + ], + [ + [-2.5578882694244385] + ], + [ + [0.11012923717498779] + ] + ] + }, + { + "test_name": "broadcast_add_(1,)_and_(3, 1, 2)", + "op_name": "add", + "input_x": [-0.6620396375656128], + "input_y": [ + [ + [0.2589207887649536, -1.0626986026763916] + ], + [ + [-1.4913355112075806, 0.16728094220161438] + ], + [ + [0.7527865171432495, 0.611292839050293] + ] + ], + "expected_output": [ + [ + [-0.4031188488006592, -1.7247382402420044] + ], + [ + [-2.1533751487731934, -0.4947586953639984] + ], + [ + [0.09074687957763672, -0.050746798515319824] + ] + ], + "expected_grad_x": [6.0], + "expected_grad_y": [ + [ + [1.0, 1.0] + ], + [ + [1.0, 1.0] + ], + [ + [1.0, 1.0] + ] + ] + }, + { + "test_name": "broadcast_mul_(1,)_and_(3, 1, 2)", + "op_name": "mul", + "input_x": [-1.7093836069107056], + "input_y": [ + [ + [-0.6855810284614563, -0.10798398405313492] + ], + [ + [-0.5913975834846497, 0.7366381287574768] + ], + [ + [1.6831679344177246, -0.3644360899925232] + ] + ], + "expected_output": [ + [ + [1.1719210147857666, 0.1845860481262207] + ], + [ + [1.01092529296875, -1.2591971158981323] + ], + [ + [-2.8771796226501465, 0.6229611039161682] + ] + ], + "expected_grad_x": 
[0.6704073548316956], + "expected_grad_y": [ + [ + [-1.7093836069107056, -1.7093836069107056] + ], + [ + [-1.7093836069107056, -1.7093836069107056] + ], + [ + [-1.7093836069107056, -1.7093836069107056] + ] + ] + }, + { + "test_name": "broadcast_add_(5, 1, 4, 1)_and_(3, 1, 1)", + "op_name": "add", + "input_x": [ + [ + [ + [1.6522549390792847], + [-0.4681110680103302], + [-1.3070950508117676], + [-0.2728694975376129] + ] + ], + [ + [ + [-1.0562596321105957], + [0.24130821228027344], + [0.18275369703769684], + [0.6246524453163147] + ] + ], + [ + [ + [-0.7939775586128235], + [-0.674835205078125], + [-0.3876877427101135], + [0.44965043663978577] + ] + ], + [ + [ + [0.3726101517677307], + [-1.9104946851730347], + [0.26085028052330017], + [1.4177610874176025] + ] + ], + [ + [ + [0.6738032102584839], + [1.4665507078170776], + [-1.1077474355697632], + [-0.7443782091140747] + ] + ] + ], + "input_y": [ + [ + [-0.048239294439554214] + ], + [ + [-0.8039766550064087] + ], + [ + [-0.3882785737514496] + ] + ], + "expected_output": [ + [ + [ + [1.604015588760376], + [-0.5163503885269165], + [-1.3553344011306763], + [-0.32110878825187683] + ], + [ + [0.848278284072876], + [-1.2720876932144165], + [-2.1110715866088867], + [-1.0768461227416992] + ], + [ + [1.2639763355255127], + [-0.8563896417617798], + [-1.6953736543655396], + [-0.6611480712890625] + ] + ], + [ + [ + [-1.1044989824295044], + [0.19306892156600952], + [0.13451440632343292], + [0.5764131546020508] + ], + [ + [-1.8602362871170044], + [-0.5626684427261353], + [-0.621222972869873], + [-0.179324209690094] + ], + [ + [-1.4445382356643677], + [-0.14697036147117615], + [-0.20552487671375275], + [0.2363738715648651] + ] + ], + [ + [ + [-0.8422168493270874], + [-0.7230744957923889], + [-0.43592703342437744], + [0.40141114592552185] + ], + [ + [-1.597954273223877], + [-1.4788118600845337], + [-1.191664457321167], + [-0.3543262183666229] + ], + [ + [-1.1822561025619507], + [-1.063113808631897], + [-0.7759662866592407], + 
[0.06137186288833618] + ] + ], + [ + [ + [0.3243708610534668], + [-1.9587340354919434], + [0.21261098980903625], + [1.3695217370986938] + ], + [ + [-0.431366503238678], + [-2.7144713401794434], + [-0.5431263446807861], + [0.6137844324111938] + ], + [ + [-0.015668421983718872], + [-2.2987732887268066], + [-0.12742829322814941], + [1.0294824838638306] + ] + ], + [ + [ + [0.62556391954422], + [1.418311357498169], + [-1.1559867858886719], + [-0.7926174998283386] + ], + [ + [-0.1301734447479248], + [0.662574052810669], + [-1.9117240905761719], + [-1.5483548641204834] + ], + [ + [0.2855246365070343], + [1.0782721042633057], + [-1.4960260391235352], + [-1.1326568126678467] + ] + ] + ], + "expected_grad_x": [ + [ + [ + [3.0], + [3.0], + [3.0], + [3.0] + ] + ], + [ + [ + [3.0], + [3.0], + [3.0], + [3.0] + ] + ], + [ + [ + [3.0], + [3.0], + [3.0], + [3.0] + ] + ], + [ + [ + [3.0], + [3.0], + [3.0], + [3.0] + ] + ], + [ + [ + [3.0], + [3.0], + [3.0], + [3.0] + ] + ] + ], + "expected_grad_y": [ + [ + [20.0] + ], + [ + [20.0] + ], + [ + [20.0] + ] + ] + }, + { + "test_name": "broadcast_mul_(5, 1, 4, 1)_and_(3, 1, 1)", + "op_name": "mul", + "input_x": [ + [ + [ + [0.7183824777603149], + [-0.5719326138496399], + [-0.46663370728492737], + [0.1017654687166214] + ] + ], + [ + [ + [0.7780604362487793], + [-1.4069199562072754], + [0.4058326482772827], + [1.8279902935028076] + ] + ], + [ + [ + [-1.4415538311004639], + [-0.6341907978057861], + [-1.9628868103027344], + [-0.6502721905708313] + ] + ], + [ + [ + [-1.4416067600250244], + [-1.2435057163238525], + [-1.226744532585144], + [-0.14965394139289856] + ] + ], + [ + [ + [-1.2425028085708618], + [1.4803948402404785], + [0.6343469023704529], + [-0.7937742471694946] + ] + ] + ], + "input_y": [ + [ + [-0.24528881907463074] + ], + [ + [-0.477226585149765] + ], + [ + [-1.3771432638168335] + ] + ], + "expected_output": [ + [ + [ + [-0.1762111932039261], + [0.14028868079185486], + [0.11446002870798111], + [-0.024961931630969048] + ], + [ + 
[-0.34283122420310974], + [0.2729414403438568], + [0.22269001603126526], + [-0.04856518656015396] + ], + [ + [-0.9893155694007874], + [0.7876331210136414], + [0.6426214575767517], + [-0.14014562964439392] + ] + ], + [ + [ + [-0.19084952771663666], + [0.3451017439365387], + [-0.09954620897769928], + [-0.4483855664730072] + ], + [ + [-0.37131112813949585], + [0.671419620513916], + [-0.19367413222789764], + [-0.8723655939102173] + ], + [ + [-1.0715006589889526], + [1.9375303983688354], + [-0.5588896870613098], + [-2.517404556274414] + ] + ], + [ + [ + [0.3535970449447632], + [0.15555991232395172], + [0.4814741909503937], + [0.1595045030117035] + ], + [ + [0.6879478096961975], + [0.3026527166366577], + [0.9367417693138123], + [0.3103271722793579] + ], + [ + [1.9852261543273926], + [0.8733716011047363], + [2.703176259994507], + [0.8955179452896118] + ] + ], + [ + [ + [0.35361000895500183], + [0.30501803755760193], + [0.3009067177772522], + [0.03670843690633774] + ], + [ + [0.6879730820655823], + [0.5934339761734009], + [0.5854350924491882], + [0.07141883671283722] + ], + [ + [1.985298991203308], + [1.7124855518341064], + [1.6894029378890991], + [0.2060949206352234] + ] + ], + [ + [ + [0.3047720491886139], + [-0.3631243109703064], + [-0.1555982083082199], + [0.19470395147800446] + ], + [ + [0.5929553508758545], + [-0.706483781337738], + [-0.30272719264030457], + [0.37881016731262207] + ], + [ + [1.711104393005371], + [-2.0387158393859863], + [-0.8735865354537964], + [1.0931408405303955] + ] + ] + ], + "expected_grad_x": [ + [ + [ + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874] + ] + ], + [ + [ + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874] + ] + ], + [ + [ + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874] + ] + ], + [ + [ + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874] + ] + ], + [ + [ + 
[-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874], + [-2.099658727645874] + ] + ] + ], + "expected_grad_y": [ + [ + [-7.285405158996582] + ], + [ + [-7.285405158996582] + ], + [ + [-7.285405158996582] + ] + ] + } + ], + "matmul": [ + { + "test_name": "matmul_1D_dot_product", + "input_x": [-0.13354362547397614, -2.023963212966919, -1.170003056526184], + "input_y": [-1.1057088375091553, -0.5529776215553284, 1.1801894903182983], + "expected_output": -0.11395859718322754, + "expected_grad_x": [-1.1057088375091553, -0.5529776215553284, 1.1801894903182983], + "expected_grad_y": [-0.13354362547397614, -2.023963212966919, -1.170003056526184] + }, + { + "test_name": "matmul_2D_matrix_multiply", + "input_x": [ + [0.8215275406837463, 0.1607755422592163, -0.19208626449108124], + [-0.6162866353988647, -0.02657255157828331, -0.25292056798934937] + ], + "input_y": [ + [-0.28625473380088806, 0.3182649314403534, 1.1329631805419922, -1.1634578704833984], + [-2.6641499996185303, 1.012193202972412, -0.6236492991447449, 0.5437685251235962], + [-2.3363192081451416, 0.10341913998126984, 0.25140896439552307, 1.244645357131958] + ], + "expected_output": [ + [-0.21472150087356567, 0.4043339192867279, 0.7822006940841675, -1.1074672937393188], + [0.838111400604248, -0.24919579923152924, -0.7452446222305298, 0.38777783513069153] + ], + "expected_grad_x": [ + [0.001515507698059082, -1.7318376302719116, -0.736845850944519], + [0.001515507698059082, -1.7318376302719116, -0.736845850944519] + ], + "expected_grad_y": [ + [0.2052409052848816, 0.2052409052848816, 0.2052409052848816, 0.2052409052848816], + [0.1342029869556427, 0.1342029869556427, 0.1342029869556427, 0.1342029869556427], + [-0.4450068473815918, -0.4450068473815918, -0.4450068473815918, -0.4450068473815918] + ] + }, + { + "test_name": "matmul_1D_2D_prepend_remove", + "input_x": [0.5262587070465088, 0.7186952829360962, 0.2846910357475281], + "input_y": [ + [0.40772733092308044, 0.25750425457954407, 0.9894949793815613, 
-0.13609549403190613], + [-0.046313412487506866, -0.8070783019065857, -0.8997132778167725, 0.4604453444480896], + [0.09099376946687698, 1.264731764793396, -0.5384116172790527, -0.5032119154930115] + ], + "expected_output": [0.20718994736671448, -0.08447171002626419, -0.27917027473449707, 0.11603852361440659], + "expected_grad_x": [1.518631100654602, -1.2926596403121948, 0.3141019344329834], + "expected_grad_y": [ + [0.5262587070465088, 0.5262587070465088, 0.5262587070465088, 0.5262587070465088], + [0.7186952829360962, 0.7186952829360962, 0.7186952829360962, 0.7186952829360962], + [0.2846910357475281, 0.2846910357475281, 0.2846910357475281, 0.2846910357475281] + ] + }, + { + "test_name": "matmul_2D_1D_matrix_vector", + "input_x": [ + [-1.1151187419891357, -0.6466555595397949, -0.9256587624549866], + [1.9982808828353882, 0.34709975123405457, -0.10135623067617416] + ], + "input_y": [-0.7542831897735596, 1.9345619678497314, -1.2589340209960938], + "expected_output": [0.7554633021354675, -0.7081828713417053], + "expected_grad_x": [ + [-0.7542831897735596, 1.9345619678497314, -1.2589340209960938], + [-0.7542831897735596, 1.9345619678497314, -1.2589340209960938] + ], + "expected_grad_y": [0.8831621408462524, -0.29955580830574036, -1.027014970779419] + }, + { + "test_name": "matmul_ND_batched_with_broadcast", + "input_x": [ + [ + [ + [-0.38954418897628784, 0.5330391526222229, -0.24234455823898315], + [0.6653695106506348, 0.9382926821708679, 0.044521868228912354] + ] + ], + [ + [ + [1.1535056829452515, 1.0917677879333496, 0.04327869415283203], + [-1.7126272916793823, -1.030257225036621, -0.5207390189170837] + ] + ] + ], + "input_y": [ + [ + [1.6714451313018799, -0.4941222369670868], + [1.1431647539138794, 1.3563427925109863], + [-1.2485027313232422, -0.02572690322995186] + ], + [ + [1.936702013015747, -0.9197023510932922], + [0.8713390231132507, -0.522757887840271], + [-1.287382960319519, -1.2039293050765991] + ], + [ + [-0.6580986976623535, -0.35780131816864014], + 
[0.4201647937297821, -0.0068255518563091755], + [0.33024173974990845, -0.9533721804618835] + ] + ], + "expected_output": [ + [ + [ + [0.2608177065849304, 0.9217010736465454], + [2.1291661262512207, 0.942727267742157] + ], + [ + [0.022017061710357666, 0.3713800013065338], + [2.0488767623901367, -1.1560429334640503] + ], + [ + [0.4002905488014221, 0.3667857050895691], + [-0.02893828973174095, -0.28692036867141724] + ] + ], + [ + [ + [3.122058391571045, 0.9097251296043396], + [-3.3901724815368652, -0.5377377867698669] + ], + [ + [3.129580497741699, -1.6837165355682373], + [-3.544161319732666, 2.7406153678894043] + ], + [ + [-0.28610578179359436, -0.46143847703933716], + [0.5222302675247192, 1.1162704229354858] + ] + ] + ], + "expected_grad_x": [ + [ + [ + [1.1784225702285767, 3.261427640914917, -4.388672351837158], + [1.1784225702285767, 3.261427640914917, -4.388672351837158] + ] + ], + [ + [ + [1.1784225702285767, 3.261427640914917, -4.388672351837158], + [1.1784225702285767, 3.261427640914917, -4.388672351837158] + ] + ] + ], + "expected_grad_y": [ + [ + [-0.28329628705978394, -0.28329628705978394], + [1.5328423976898193, 1.5328423976898193], + [-0.6752830147743225, -0.6752830147743225] + ], + [ + [-0.28329628705978394, -0.28329628705978394], + [1.5328423976898193, 1.5328423976898193], + [-0.6752830147743225, -0.6752830147743225] + ], + [ + [-0.28329628705978394, -0.28329628705978394], + [1.5328423976898193, 1.5328423976898193], + [-0.6752830147743225, -0.6752830147743225] + ] + ] + } + ], + "reductions": [ + { + "test_name": "sum_dim_None_keepdim_False", + "op_name": "sum", + "dim": null, + "keepdim": false, + "input": [ + [ + [-0.4858148694038391, -0.49008455872535706, 0.9206437468528748, -0.6285682320594788, 0.6663421988487244], + [-0.03547006472945213, -0.11422315984964371, -0.05629456788301468, 0.2728163003921509, 0.7324181199073792], + [1.7147032022476196, -2.687852621078491, -0.042423129081726074, 0.7085697650909424, -1.2797164916992188], + 
[-1.5433486700057983, 1.2571464776992798, 0.5763423442840576, -0.9772611856460571, -1.533514380455017] + ], + [ + [0.7982524633407593, -0.5078546404838562, -0.91661536693573, 0.4702746272087097, -1.750167965888977], + [0.9156812429428101, -0.7037500739097595, 1.2993873357772827, -1.6580263376235962, 3.150531768798828], + [-0.2053070068359375, -1.4739270210266113, -0.03289732709527016, 0.3655078113079071, -0.03989681601524353], + [-1.2108122110366821, -1.6996815204620361, -0.4819910228252411, -0.9970720410346985, -0.34807199239730835] + ], + [ + [0.29087918996810913, -0.1686161607503891, -0.7467212677001953, 1.4040724039077759, 1.2316521406173706], + [2.9194436073303223, 0.4634987413883209, 0.6845051050186157, 1.5183887481689453, -0.91494220495224], + [2.251573324203491, -0.3680903911590576, -2.4219810962677, -0.08308011293411255, 0.683392345905304], + [2.353518486022949, 0.04335293173789978, 0.580488383769989, 0.6679447293281555, 0.6449635624885559] + ] + ], + "expected_output": 2.9822170734405518, + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "sum_dim_0_keepdim_False", + "op_name": "sum", + "dim": 0, + "keepdim": false, + "input": [ + [ + [1.4343817234039307, -1.9291707277297974, -1.6733232736587524, 0.9231958985328674, -0.020498022437095642], + [0.48795756697654724, 0.37779226899147034, -1.057392954826355, -0.22911065816879272, -0.3448553681373596], + [0.4634801745414734, 1.4675588607788086, 1.5677039623260498, 0.4278451204299927, -0.2732718884944916], + [0.5940448641777039, -0.34265488386154175, 1.2370060682296753, 1.1887205839157104, 1.002081274986267] + ], + [ + [-1.4646551609039307, 
-0.030163101851940155, -0.11644338816404343, 0.3850177526473999, 0.05211031809449196], + [0.2882064878940582, 0.35200777649879456, -0.3861907422542572, 0.3567925691604614, 1.7113765478134155], + [0.21664851903915405, 0.3170451521873474, -0.1140289157629013, -0.8452204465866089, 0.5263259410858154], + [0.12554097175598145, -1.7527469396591187, 0.7751637697219849, -0.10626579821109772, 0.8907954692840576] + ], + [ + [2.0987792015075684, -0.5285679697990417, -0.4658718407154083, 0.49164071679115295, 0.6064606308937073], + [-0.46068817377090454, -0.8800807595252991, -1.4766337871551514, 0.982934296131134, 0.034095875918865204], + [-0.461677223443985, 0.3484693765640259, -1.7167327404022217, 0.04617787152528763, 0.09389957040548325], + [-1.35635244846344, -1.0603324174880981, 1.0654057264328003, 0.8679789900779724, -0.5582399368286133] + ] + ], + "expected_output": [ + [2.0685057640075684, -2.4879016876220703, -2.255638599395752, 1.7998543977737427, 0.6380729079246521], + [0.3154758810997009, -0.15028071403503418, -2.920217514038086, 1.1106162071228027, 1.400617003440857], + [0.21845147013664246, 2.133073329925537, -0.2630577087402344, -0.3711974620819092, 0.3469536304473877], + [-0.6367666125297546, -3.155734062194824, 3.07757568359375, 1.9504337310791016, 1.3346368074417114] + ], + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "sum_dim_0_keepdim_True", + "op_name": "sum", + "dim": 0, + "keepdim": true, + "input": [ + [ + [0.02351505309343338, 0.28558799624443054, 0.020343216136097908, 0.9289091229438782, -0.9238923788070679], + [1.1473842859268188, 0.941576361656189, 0.4414193034172058, 
-1.7462857961654663, 0.7103408575057983], + [-0.10175959765911102, -0.9663392901420593, -1.4231536388397217, -0.7846477627754211, 1.7235876321792603], + [0.1520993411540985, -0.17470814287662506, -1.7561272382736206, 1.4259073734283447, 0.5127183794975281] + ], + [ + [-0.4026731848716736, 1.9770677089691162, -0.030815819278359413, -1.737373948097229, -0.09136287868022919], + [-0.5283262729644775, -0.46433013677597046, -0.15669254958629608, -1.5964131355285645, -1.5322294235229492], + [-0.11408505588769913, -0.9761450290679932, 0.24113479256629944, -0.05469474941492081, 0.04771281033754349], + [-0.8637551665306091, -1.141897201538086, -0.8292406797409058, 0.32627707719802856, -0.14103326201438904] + ], + [ + [-0.25818052887916565, -1.340453863143921, -0.5561836957931519, 1.173104166984558, 0.6556643843650818], + [-0.7471360564231873, 1.1705763339996338, 1.6411151885986328, 0.428303986787796, -1.0703928470611572], + [-0.6159464716911316, -1.0194525718688965, 0.37926265597343445, 0.9845729470252991, -1.7715047597885132], + [0.4349344074726105, 0.06312361359596252, -0.6894504427909851, -0.2829807698726654, 0.6600029468536377] + ] + ], + "expected_output": [ + [ + [-0.6373386383056641, 0.9222018718719482, -0.5666562914848328, 0.3646393418312073, -0.3595908284187317], + [-0.12807804346084595, 1.647822618484497, 1.9258419275283813, -2.9143950939178467, -1.892281413078308], + [-0.8317911624908447, -2.9619369506835938, -0.8027561902999878, 0.14523041248321533, -0.00020432472229003906], + [-0.27672144770622253, -1.2534817457199097, -3.2748184204101562, 1.4692035913467407, 1.0316880941390991] + ] + ], + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 
1.0, 1.0] + ] + ] + }, + { + "test_name": "sum_dim_1_keepdim_False", + "op_name": "sum", + "dim": 1, + "keepdim": false, + "input": [ + [ + [-1.2625644207000732, -0.1746397167444229, 0.5484238862991333, 0.45765289664268494, 0.9676891565322876], + [-0.6673583388328552, 1.6183183193206787, -0.26440465450286865, -1.9805819988250732, 0.6427897214889526], + [-0.3697128891944885, -0.24597389996051788, 0.3293827772140503, 0.6973733305931091, 0.8342204093933105], + [-1.098692774772644, 0.17085814476013184, -0.7267439365386963, -3.832531690597534, 0.11584769189357758] + ], + [ + [1.9915446043014526, 1.0238094329833984, 2.1327197551727295, 0.23347528278827667, -0.3182602822780609], + [-0.28946641087532043, -0.9444983005523682, -0.1793764978647232, 0.31204938888549805, 0.717379093170166], + [-0.2302778661251068, 0.7147674560546875, 0.14227113127708435, -0.9353750944137573, -0.8332573175430298], + [-0.952683687210083, -0.36366915702819824, -1.2479252815246582, -0.09261447191238403, 0.6533640027046204] + ], + [ + [-0.5295901894569397, -0.6448984146118164, -0.369537353515625, -0.47141557931900024, 1.2438589334487915], + [-1.7002424001693726, -1.2208786010742188, -1.3158544301986694, -1.8084080219268799, -0.4799889922142029], + [0.7350953221321106, -0.269319087266922, 0.8924283385276794, 1.3709611892700195, 0.7821508049964905], + [-0.30908405780792236, 0.6509400010108948, -1.5479099750518799, -1.2930028438568115, 0.8795658946037292] + ] + ], + "expected_output": [ + [-3.3983283042907715, 1.3685628175735474, -0.11334192752838135, -4.658087253570557, 2.560546875], + [0.5191166400909424, 0.43040943145751953, 0.847689151763916, -0.48246490955352783, 0.21922549605369568], + [-1.8038214445114136, -1.4841561317443848, -2.3408734798431396, -2.2018651962280273, 2.425586700439453] + ], + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], 
+ [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "sum_dim_1_keepdim_True", + "op_name": "sum", + "dim": 1, + "keepdim": true, + "input": [ + [ + [-0.03328368067741394, -0.1935928761959076, -0.7671046257019043, 1.041800618171692, -0.5939895510673523], + [-0.9907869696617126, -0.9927570819854736, -0.2557562291622162, -0.22515931725502014, 0.7214905023574829], + [0.12578019499778748, 1.4050774574279785, 0.19477412104606628, -0.4741835296154022, 0.1276102066040039], + [0.6101059317588806, -0.3841469883918762, -0.921748697757721, -0.4872817397117615, -0.5074361562728882] + ], + [ + [0.5522661805152893, 0.061027202755212784, -0.9556332230567932, -0.7779799103736877, 1.3941880464553833], + [-0.2754690647125244, -0.5164039731025696, 0.1548738032579422, -0.8348855972290039, 0.22885975241661072], + [-2.0042972564697266, 0.005512263625860214, -0.3478994369506836, 0.5377600193023682, -0.9290909767150879], + [1.1321873664855957, -0.7327660322189331, 1.1509846448898315, -0.6960206627845764, 0.5155009627342224] + ], + [ + [0.27344995737075806, 0.6651293039321899, 1.0537790060043335, 0.7269541621208191, -0.4144572615623474], + [-0.7832543253898621, 1.1300108432769775, -1.612937569618225, 0.445094496011734, 0.051411572843790054], + [-0.35013654828071594, -1.006677508354187, -0.534939169883728, 0.9171100854873657, -1.031330943107605], + [-0.3431452810764313, -0.5040718913078308, 0.07251296937465668, -1.078025221824646, -0.39548805356025696] + ] + ], + "expected_output": [ + [ + [-0.2881844639778137, -0.16541951894760132, -1.74983549118042, -0.14482393860816956, -0.25232499837875366] + ], + [ + [-0.5953128337860107, -1.1826305389404297, 0.002325773239135742, -1.7711260318756104, 1.2094578742980957] + ], + [ + [-1.2030861377716064, 0.28439074754714966, -1.0215847492218018, 1.0111335515975952, -1.7898646593093872] + ] + ], + 
"expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "sum_dim_-1_keepdim_False", + "op_name": "sum", + "dim": -1, + "keepdim": false, + "input": [ + [ + [-0.16857317090034485, 0.688886284828186, 0.09771557152271271, 1.052785038948059, -1.1113090515136719], + [-0.9389597773551941, -1.3075956106185913, 0.9676118493080139, -0.7394251227378845, -1.5103271007537842], + [0.08060058951377869, -1.8929684162139893, 0.5650917887687683, 0.8412631750106812, -0.9580911993980408], + [-0.7895215749740601, 0.015372429974377155, -0.027610722929239273, -0.053718894720077515, -0.7734133005142212] + ], + [ + [-1.3837915658950806, 0.97823166847229, 1.2639201879501343, -2.0509865283966064, 0.7195857763290405], + [0.5959717631340027, -1.8225737810134888, -0.1343626081943512, 0.49848124384880066, 0.3930509090423584], + [-0.6577728390693665, 0.24517367780208588, 0.20588766038417816, -0.8329556584358215, -0.820264458656311], + [-0.8202312588691711, 0.9118992686271667, -1.6226643323898315, 0.6528452634811401, 0.474252313375473] + ], + [ + [-1.0028913021087646, 0.06786083430051804, 0.5943557024002075, -1.4299345016479492, -1.8149548768997192], + [-0.48883718252182007, 0.3190588653087616, 0.8207234740257263, 0.26855215430259705, 0.5349103808403015], + [0.2828715443611145, 0.2559785544872284, -0.3839770257472992, 0.9996393918991089, -0.411981463432312], + [1.7920981645584106, -0.7672699093818665, -1.307977557182312, -1.7257500886917114, -0.03232753649353981] + ] + ], + "expected_output": [ + [0.5595046877861023, -3.528696060180664, -1.364104151725769, -1.628892183303833], + [-0.47304046154022217, -0.4694325029850006, 
-1.8599317073822021, -0.4038987159729004], + [-3.585564136505127, 1.4544076919555664, 0.7425310015678406, -2.041226863861084] + ], + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "sum_dim_-1_keepdim_True", + "op_name": "sum", + "dim": -1, + "keepdim": true, + "input": [ + [ + [1.3067461252212524, 0.1913401335477829, 1.3767470121383667, 0.6892044544219971, 0.43921586871147156], + [-0.04813985526561737, -0.3315163552761078, -0.23632389307022095, -0.08464081585407257, -0.2434539496898651], + [-1.4118940830230713, -1.0503880977630615, 1.110618233680725, 0.21369199454784393, 0.9332243204116821], + [0.7342482805252075, -1.1814165115356445, -0.9001671671867371, 2.1022186279296875, 0.13313262164592743] + ], + [ + [0.022425031289458275, -0.2799490988254547, -0.13277991116046906, -1.1816505193710327, -1.7333787679672241], + [1.1858826875686646, 0.4465799629688263, 1.1301037073135376, -0.3333589434623718, 0.8935588598251343], + [-1.948160171508789, -1.366417646408081, 0.009384111501276493, 0.6304294466972351, 0.8632985949516296], + [1.1419644355773926, 0.3109254240989685, -0.10642467439174652, -1.2388849258422852, -1.1811842918395996] + ], + [ + [0.8579769134521484, -1.6933249235153198, 0.28935855627059937, 0.12979160249233246, 0.9177106618881226], + [-0.08528606593608856, 0.646186888217926, 0.9474418759346008, -0.9643430113792419, -0.05544678866863251], + [0.6487414836883545, -1.5244767665863037, 0.7500134110450745, -2.1544389724731445, 1.4023377895355225], + [0.38867735862731934, 0.7254555821418762, -1.389511227607727, -0.8552696704864502, -1.5335310697555542] + ] + ], + "expected_output": [ + [ + 
[4.003253936767578], + [-0.9440748691558838], + [-0.20474763214588165], + [0.888015866279602] + ], + [ + [-3.305333137512207], + [3.322766065597534], + [-1.8114655017852783], + [-1.0736039876937866] + ], + [ + [0.5015128254890442], + [0.48855286836624146], + [-0.8778231143951416], + [-2.6641790866851807] + ] + ], + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "mean_dim_None_keepdim_False", + "op_name": "mean", + "dim": null, + "keepdim": false, + "input": [ + [ + [-1.1046700477600098, 0.3997063636779785, -0.7377328872680664, 0.8807763457298279, -0.7662639021873474], + [1.4585676193237305, 1.7723872661590576, 0.32965973019599915, -0.014356887899339199, 1.0570905208587646], + [0.40178006887435913, 0.6623095273971558, -0.8157885074615479, 1.7509973049163818, -0.07119281589984894], + [-0.3190559148788452, 0.050714727491140366, 0.019659370183944702, -0.06124546006321907, -0.9422666430473328] + ], + [ + [1.4318883419036865, 0.13153165578842163, -0.18222470581531525, 0.3660371005535126, 0.5798347592353821], + [0.2429470419883728, -1.2091906070709229, 0.07767737656831741, 0.17619068920612335, 0.578863799571991], + [1.5511679649353027, 0.08444352447986603, 1.2101210355758667, -0.8561125993728638, -0.01396828331053257], + [-0.2777872681617737, -0.7856798768043518, 0.46062397956848145, 0.566218376159668, 1.1541569232940674] + ], + [ + [-0.4426518380641937, 1.2387607097625732, -1.0242971181869507, 0.7994512319564819, 0.2551962435245514], + [-1.102837324142456, 0.8238721489906311, 0.0035749999806284904, -1.7263970375061035, 0.23050972819328308], + [-0.006388451438397169, -0.3764995038509369, 
-0.12150754034519196, 0.41562098264694214, 1.0095174312591553], + [0.6818063259124756, -0.7300068736076355, -0.8303658366203308, -0.9434769749641418, 2.028413772583008] + ] + ], + "expected_output": 0.15700183808803558, + "expected_grad": [ + [ + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107] + ], + [ + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107] + ], + [ + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107], + [0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107, 0.01666666753590107] + ] + ] + }, + { + "test_name": "mean_dim_0_keepdim_False", + "op_name": "mean", + "dim": 0, + "keepdim": false, + "input": [ + [ + [0.1755259782075882, 0.29882222414016724, -0.6017733812332153, 1.1090705394744873, -0.26971787214279175], + [-0.4379420578479767, -1.0124340057373047, -0.14667245745658875, 0.15679313242435455, 0.19906163215637207], + [0.6868006587028503, -0.4260328412055969, -0.7560138702392578, 
0.17112013697624207, -0.7641999125480652], + [-0.44291654229164124, 1.180337905883789, 0.37527406215667725, 0.16405928134918213, -0.5820143818855286] + ], + [ + [-0.6077602505683899, -0.20455589890480042, 0.5790372490882874, -0.19989891350269318, -0.06916097551584244], + [-0.2501976788043976, -0.631926953792572, -0.10361812263727188, 0.9832454323768616, -0.21117885410785675], + [0.8466978073120117, 0.8503185510635376, -1.003414511680603, -0.9655314683914185, 0.21109014749526978], + [-1.413875699043274, 0.8920503854751587, -0.30397742986679077, -0.05114508792757988, -0.6013848185539246] + ], + [ + [0.11547855287790298, -0.17881472408771515, -0.09405016899108887, -0.6329993009567261, -1.0465848445892334], + [1.3759163618087769, 0.05762922391295433, 0.35209375619888306, 1.0967378616333008, -0.9882838726043701], + [1.9996482133865356, -1.2997536659240723, -0.7523331046104431, -0.47686854004859924, 1.2204824686050415], + [-0.15345095098018646, -0.5119454264640808, 0.11924147605895996, 1.428880214691162, 2.0869503021240234] + ] + ], + "expected_output": [ + [-0.1055852472782135, -0.028182798996567726, -0.038928765803575516, 0.09205744415521622, -0.46182122826576233], + [0.22925888001918793, -0.5289105772972107, 0.033934395760297775, 0.7455921173095703, -0.333467036485672], + [1.1777156591415405, -0.2918226420879364, -0.8372538089752197, -0.42375993728637695, 0.22245757281780243], + [-0.6700810790061951, 0.5201475620269775, 0.06351270526647568, 0.5139314532279968, 0.30118370056152344] + ], + "expected_grad": [ + [ + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408] + ], + [ + [0.3333333432674408, 0.3333333432674408, 
0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408] + ], + [ + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408] + ] + ] + }, + { + "test_name": "mean_dim_0_keepdim_True", + "op_name": "mean", + "dim": 0, + "keepdim": true, + "input": [ + [ + [0.9313072562217712, 0.11275313794612885, -0.29632923007011414, 0.6281623244285583, 0.797209620475769], + [0.6957338452339172, -0.18519674241542816, 0.19908924400806427, 0.21209175884723663, 1.1861251592636108], + [-0.08824097365140915, -1.1839087009429932, -0.23501551151275635, 0.6680712103843689, -0.7205530405044556], + [0.9816026091575623, -0.15770527720451355, 0.5783246755599976, 0.5183308720588684, 0.10639314353466034] + ], + [ + [-0.0708099901676178, 0.1868356168270111, -1.135615348815918, -0.6121562123298645, 0.4646506905555725], + [0.9826740026473999, 0.4591689705848694, 0.3639809787273407, 0.24808230996131897, -0.5714194178581238], + [-1.8829892873764038, 1.6257703304290771, 1.1227099895477295, -0.24534782767295837, 0.42072793841362], + [1.2927830219268799, -0.062042392790317535, -0.07447009533643723, 1.8144526481628418, 0.4126001298427582] + ], + [ + [1.5678290128707886, -0.5997704267501831, 0.13211850821971893, -0.4554636776447296, -0.32575365900993347], + [2.110002040863037, -0.5582384467124939, 0.3878994286060333, -2.2768142223358154, 
0.37306782603263855], + [-0.9947562217712402, -1.3262041807174683, -0.599851667881012, 0.17797666788101196, -1.143309235572815], + [1.3394620418548584, -0.996752142906189, -0.6021904945373535, 0.32188680768013, 0.1322384774684906] + ] + ], + "expected_output": [ + [ + [0.8094420433044434, -0.10006055980920792, -0.43327537178993225, -0.1464858502149582, 0.31203556060791016], + [1.262803316116333, -0.09475541114807129, 0.3169898986816406, -0.6055467128753662, 0.3292578458786011], + [-0.9886621832847595, -0.2947808504104614, 0.09594760090112686, 0.2002333402633667, -0.4810447692871094], + [1.2046159505844116, -0.4054999351501465, -0.03277863934636116, 0.884890079498291, 0.21707725524902344] + ] + ], + "expected_grad": [ + [ + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408] + ], + [ + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408] + ], + [ + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408], + [0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408] + ] + 
] + }, + { + "test_name": "mean_dim_1_keepdim_False", + "op_name": "mean", + "dim": 1, + "keepdim": false, + "input": [ + [ + [0.27381452918052673, 1.7188366651535034, 1.3450311422348022, -1.2653104066848755, 0.45117413997650146], + [0.6597211360931396, -2.012479305267334, -0.516261100769043, -1.0803741216659546, 1.424518346786499], + [-1.1213253736495972, 0.3918372392654419, -0.49036842584609985, -0.2890629470348358, -0.7217605710029602], + [1.9135220050811768, 1.668276309967041, 0.674127995967865, 1.1708104610443115, 0.02161409705877304] + ], + [ + [-0.9156265258789062, 0.21846671402454376, -0.5406830906867981, -0.8299271464347839, 2.0619850158691406], + [0.3048802316188812, -1.4610782861709595, -0.20266015827655792, 0.03146770969033241, 0.2910449504852295], + [0.5939645171165466, 1.3427438735961914, -0.6950612664222717, 0.8987486362457275, -0.763476550579071], + [-0.19663105905056, -0.3633078634738922, -1.5530608892440796, -0.15105754137039185, -0.5337246656417847] + ], + [ + [1.6809849739074707, -0.11213088780641556, -0.5451535582542419, 0.5253366231918335, 0.5843788981437683], + [-2.286334276199341, 0.5851041674613953, 1.2774115800857544, 0.8801257610321045, 0.5200220942497253], + [-1.0255969762802124, 1.7627143859863281, -0.0564274862408638, 0.9568160176277161, -0.6867133378982544], + [1.02428138256073, 1.7308460474014282, -0.32468941807746887, 0.3143851161003113, -0.0669134333729744] + ] + ], + "expected_output": [ + [0.4314330816268921, 0.4416177272796631, 0.2531324028968811, -0.3659842610359192, 0.2938864827156067], + [-0.053353201597929, -0.06579387933015823, -0.7478663921356201, -0.012692078948020935, 0.2639571726322174], + [-0.15166622400283813, 0.991633415222168, 0.0877852812409401, 0.6691659092903137, 0.08769355714321136] + ], + "expected_grad": [ + [ + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25] + ], + [ + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 
0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25] + ], + [ + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25] + ] + ] + }, + { + "test_name": "mean_dim_1_keepdim_True", + "op_name": "mean", + "dim": 1, + "keepdim": true, + "input": [ + [ + [0.8160833120346069, -0.8233374357223511, -2.1418421268463135, 0.7724379897117615, -0.6358500719070435], + [0.2520127594470978, 0.9832903742790222, 0.27252840995788574, -0.9334322810173035, -0.120673269033432], + [-1.6670103073120117, -2.2226719856262207, -1.0847980976104736, 0.612401008605957, -0.5785004496574402], + [-0.725740909576416, 0.42209234833717346, -1.268004298210144, 1.395574688911438, -0.4016047418117523] + ], + [ + [-0.47602152824401855, 0.6024074554443359, -0.13895398378372192, -0.5199072957038879, -0.49655959010124207], + [1.612599492073059, -0.3255579173564911, 0.9303890466690063, -0.2840443551540375, 0.8463886380195618], + [0.018565375357866287, -1.6755516529083252, -1.5852235555648804, 1.1069889068603516, -0.6744462847709656], + [-1.8892930746078491, -1.842443585395813, 0.13227719068527222, -0.792870283126831, 1.2297093868255615] + ], + [ + [-0.013317405246198177, -0.30806609988212585, -0.3388381898403168, -0.46696820855140686, 0.14354214072227478], + [-0.9078568816184998, 2.190314531326294, 0.2566744089126587, 0.4625372588634491, -1.1410428285598755], + [1.7502615451812744, -0.11171314865350723, -1.1738200187683105, -0.6320435404777527, -1.0962568521499634], + [-0.7847272753715515, -0.5752313137054443, 0.5138634443283081, 1.050782322883606, 1.3841030597686768] + ] + ], + "expected_output": [ + [ + [-0.33116379380226135, -0.4101566970348358, -1.055528998374939, 0.46174535155296326, -0.4341571629047394] + ], + [ + [-0.18353742361068726, -0.8102864027023315, -0.16537782549858093, -0.12245824933052063, 0.22627303004264832] + ], + [ + [0.011089995503425598, 0.2988259792327881, -0.18553009629249573, 
0.10357695817947388, -0.17741364240646362] + ] + ], + "expected_grad": [ + [ + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25] + ], + [ + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25] + ], + [ + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25], + [0.25, 0.25, 0.25, 0.25, 0.25] + ] + ] + }, + { + "test_name": "mean_dim_-1_keepdim_False", + "op_name": "mean", + "dim": -1, + "keepdim": false, + "input": [ + [ + [1.459913969039917, -1.9324012994766235, 0.295070618391037, 1.953094482421875, 0.062261566519737244], + [-1.1078839302062988, 0.976121723651886, -0.07730520516633987, -0.29665258526802063, -0.26533499360084534], + [-0.79975426197052, 1.0418704748153687, 1.3010565042495728, -0.40846967697143555, -0.7162472605705261], + [-1.0271012783050537, 0.1834021359682083, -0.05814415216445923, 0.3220038414001465, 0.4668942093849182] + ], + [ + [0.014412929303944111, -1.1832906007766724, 0.03632983937859535, 2.4672696590423584, 1.3323793411254883], + [-0.09307997673749924, -1.1491583585739136, 0.39312395453453064, -0.3892734944820404, 0.5293436646461487], + [-0.6360014081001282, -0.2509534955024719, 0.4555503726005554, -0.27460846304893494, 0.9494737386703491], + [-0.8720837235450745, 0.8029743432998657, 0.27744394540786743, 0.33585336804389954, -0.7603669166564941] + ], + [ + [-0.6846820712089539, -0.7688948512077332, 1.9104690551757812, -0.9281872510910034, -0.49309614300727844], + [0.50408536195755, 0.998515248298645, 0.0367455929517746, 0.2297750562429428, -1.984581470489502], + [-1.2512000799179077, 0.6219334602355957, 0.051693860441446304, -0.5315346121788025, 0.5726004242897034], + [0.3863498866558075, 0.27223649621009827, 0.46132177114486694, 0.13579803705215454, 0.516796886920929] + ] + ], + "expected_output": [ + [0.3675878643989563, 
-0.15421099960803986, 0.08369116485118866, -0.022589052096009254], + [0.533420205116272, -0.14180883765220642, 0.04869214817881584, -0.04323578625917435], + [-0.19287827610969543, -0.04309204965829849, -0.10730139166116714, 0.35450059175491333] + ], + "expected_grad": [ + [ + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + ], + [ + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + ], + [ + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + ] + ] + }, + { + "test_name": "mean_dim_-1_keepdim_True", + "op_name": "mean", + "dim": -1, + "keepdim": true, + "input": [ + [ + [1.0863715410232544, -0.48916497826576233, 1.751997470855713, -0.3743889033794403, 0.9508031606674194], + [1.3071852922439575, -0.7512642741203308, 0.005139758810400963, 1.6046744585037231, 0.08605138957500458], + [1.9344228506088257, -0.44564706087112427, 
-0.47476428747177124, -0.30261677503585815, -0.29394063353538513], + [2.5355067253112793, 1.848445177078247, -1.1966158151626587, -0.4228180944919586, -0.7322655916213989] + ], + [ + [-0.3979387581348419, -0.004983691032975912, -2.165992498397827, 0.5304996371269226, 2.260444164276123], + [-0.3309515118598938, 1.8614799976348877, -0.11587686836719513, -0.6822512745857239, 0.4212978780269623], + [-1.3226412534713745, 1.2114580869674683, -0.8475749492645264, -0.09313177317380905, -1.2082672119140625], + [3.168529987335205, 2.297508955001831, -0.14129851758480072, -1.9953597784042358, 0.035315316170454025] + ], + [ + [-1.0469098091125488, -0.16152557730674744, -0.5230178833007812, -1.1935372352600098, 1.8063979148864746], + [0.9378061294555664, -0.38452786207199097, 0.658501386642456, 0.7616772055625916, -0.6450856328010559], + [1.6012887954711914, 0.35755881667137146, 0.26440107822418213, -0.5598823428153992, 1.4536970853805542], + [-0.2962888479232788, -0.4702155888080597, -1.499106526374817, -0.01866411603987217, -1.5055760145187378] + ] + ], + "expected_output": [ + [ + [0.5851236581802368], + [0.4503573477268219], + [0.08349082618951797], + [0.40645045042037964] + ], + [ + [0.04440578073263168], + [0.23073963820934296], + [-0.4520314633846283], + [0.6729391813278198] + ], + [ + [-0.223718523979187], + [0.26567426323890686], + [0.6234126687049866], + [-0.7579702138900757] + ] + ], + "expected_grad": [ + [ + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + ], + [ + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + 
[0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + ], + [ + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224], + [0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + ] + ] + }, + { + "test_name": "max_dim_None_keepdim_False", + "op_name": "max", + "dim": null, + "keepdim": false, + "input": [ + [ + [1.3179066181182861, 0.7556464672088623, 1.2471561431884766, 0.7881364822387695, 1.5493229627609253], + [-0.6088662147521973, 1.4112409353256226, -0.9075624346733093, 1.1897660493850708, -0.4802001118659973], + [2.2536263465881348, 1.1717573404312134, 0.8793008327484131, -0.7796709537506104, 1.4039242267608643], + [2.2192718982696533, 0.36377331614494324, 1.2563191652297974, -0.1220834031701088, 0.10120877623558044] + ], + [ + [0.47121289372444153, 0.6840168237686157, -0.7728962898254395, -0.3962509334087372, 0.6629142165184021], + [0.6556671857833862, 0.058465536683797836, 0.7882350087165833, -1.0857888460159302, 1.0519514083862305], + [0.2087085098028183, -1.4547972679138184, -0.17086558043956757, -2.0883853435516357, 0.7963455319404602], + [0.4962165355682373, 0.6029451489448547, -0.5226418375968933, 0.06845283508300781, 0.3298587203025818] + ], + [ + [-0.31479185819625854, 0.02101830020546913, -0.054547298699617386, -0.8116031885147095, -0.441931813955307], + [0.01032029278576374, 1.8312735557556152, -0.6159215569496155, -0.6072822213172913, -2.05973744392395], + 
[1.5289140939712524, 0.3378683924674988, -0.6198564171791077, -0.7939390540122986, 0.6710167527198792], + [-0.4096096158027649, -0.530225396156311, 0.2532861530780792, -0.19900000095367432, 0.6101416945457458] + ] + ], + "expected_output": 2.2536263465881348, + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "max_dim_0_keepdim_False", + "op_name": "max", + "dim": 0, + "keepdim": false, + "input": [ + [ + [1.5443801879882812, -0.35117071866989136, 0.35557520389556885, -1.8119957447052002, 0.46456536650657654], + [-0.5480050444602966, -1.0596239566802979, 0.17400647699832916, 0.7506431937217712, -0.6409745812416077], + [-0.15132363140583038, 0.625577986240387, -0.6219039559364319, -1.087321400642395, -1.3252078294754028], + [0.37722504138946533, -1.4066274166107178, -0.2883475422859192, -0.9860185384750366, 1.4865750074386597] + ], + [ + [0.14713206887245178, -1.366032361984253, -0.6708536148071289, 0.9521092176437378, -2.338611602783203], + [-1.59005606174469, -0.8660301566123962, 1.278135895729065, 0.35259687900543213, -0.07500791549682617], + [0.40587425231933594, 0.5351168513298035, -1.4525551795959473, 1.2706263065338135, 0.2695809602737427], + [-0.031602293252944946, -1.2756946086883545, -0.6372569799423218, -0.761553168296814, -0.4670298099517822] + ], + [ + [-2.774542808532715, -0.20163610577583313, -0.489888072013855, -1.5937029123306274, 0.7888379693031311], + [-0.9375290870666504, -0.14468862116336823, -0.974236011505127, 0.6229617595672607, -0.34589967131614685], + [-1.7239394187927246, -0.9909027814865112, -0.49027347564697266, 0.09400757402181625, 0.8530552983283997], + 
[-0.01044454239308834, 1.114724040031433, -1.1784462928771973, 0.6866339445114136, 1.5644149780273438] + ] + ], + "expected_output": [ + [1.5443801879882812, -0.20163610577583313, 0.35557520389556885, 0.9521092176437378, 0.7888379693031311], + [-0.5480050444602966, -0.14468862116336823, 1.278135895729065, 0.7506431937217712, -0.07500791549682617], + [0.40587425231933594, 0.625577986240387, -0.49027347564697266, 1.2706263065338135, 0.8530552983283997], + [0.37722504138946533, 1.114724040031433, -0.2883475422859192, 0.6866339445114136, 1.5644149780273438] + ], + "expected_grad": [ + [ + [1.0, 0.0, 1.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 1.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 1.0], + [1.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 1.0, 0.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "max_dim_0_keepdim_True", + "op_name": "max", + "dim": 0, + "keepdim": true, + "input": [ + [ + [0.15242499113082886, 0.06238764151930809, 0.4020906984806061, 1.6428531408309937, -0.4024767577648163], + [-0.6926602125167847, 0.5779818296432495, -0.02104124240577221, 0.3675006031990051, -1.6424492597579956], + [1.6906514167785645, -1.885027527809143, -0.7859907150268555, -1.2742111682891846, -1.0625808238983154], + [-0.632580041885376, -0.8688688278198242, -1.5556751489639282, 0.4562793970108032, 0.26206839084625244] + ], + [ + [0.5024110674858093, -0.4759514033794403, 1.6630321741104126, -1.454459547996521, 1.3266892433166504], + [0.6551998853683472, 0.6860383749008179, -0.6831625699996948, 0.8929892778396606, 1.6672981977462769], + [-1.0136138200759888, -1.391968846321106, -0.8026610612869263, 0.45739221572875977, -0.1074383333325386], + [2.130958080291748, 0.24406017363071442, 1.0900812149047852, 0.7718657851219177, 0.01673818565905094] + ], + [ + [0.2065047323703766, -0.3839341402053833, 
-0.5727554559707642, -0.368385910987854, 1.0989755392074585], + [-0.2800213098526001, -0.4368455111980438, -0.7779284715652466, 1.2181257009506226, -0.1532776802778244], + [0.3922649621963501, -0.24747081100940704, 0.2792662978172302, 0.2393292933702469, 0.08084768056869507], + [0.45395395159721375, -1.3717968463897705, -0.47459128499031067, -0.9676264524459839, -0.6989938616752625] + ] + ], + "expected_output": [ + [ + [0.5024110674858093, 0.06238764151930809, 1.6630321741104126, 1.6428531408309937, 1.3266892433166504], + [0.6551998853683472, 0.6860383749008179, -0.02104124240577221, 1.2181257009506226, 1.6672981977462769], + [1.6906514167785645, -0.24747081100940704, 0.2792662978172302, 0.45739221572875977, 0.08084768056869507], + [2.130958080291748, 0.24406017363071442, 1.0900812149047852, 0.7718657851219177, 0.26206839084625244] + ] + ], + "expected_grad": [ + [ + [0.0, 1.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0] + ], + [ + [1.0, 0.0, 1.0, 0.0, 1.0], + [1.0, 1.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [1.0, 1.0, 1.0, 1.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 1.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "max_dim_1_keepdim_False", + "op_name": "max", + "dim": 1, + "keepdim": false, + "input": [ + [ + [1.1716068983078003, -1.7179490327835083, -0.534604549407959, -1.306166410446167, -0.9246346950531006], + [0.6214187741279602, 0.1785161793231964, 0.2866869568824768, 0.4850883483886719, 0.005962289869785309], + [-0.3370489478111267, 1.0511995553970337, -1.3509515523910522, 0.5409456491470337, 1.134432315826416], + [-0.4212900400161743, -0.10431576520204544, 0.9755558371543884, -1.0027341842651367, 1.28053879737854] + ], + [ + [1.2130928039550781, 0.12110324949026108, -1.2743698358535767, 0.884564220905304, 0.12699493765830994], + [-0.8810292482376099, 1.8218294382095337, 1.5863804817199707, 0.865519106388092, 
-0.27654826641082764], + [-0.19467514753341675, -0.03429529443383217, 0.7240417003631592, 0.13301636278629303, -1.0541536808013916], + [-2.312596321105957, -0.902730405330658, -1.4185947179794312, 0.37751105427742004, -0.2093215435743332] + ], + [ + [-1.1487538814544678, 0.22709836065769196, -0.9184321165084839, 0.9846003651618958, -0.06645472347736359], + [1.2572640180587769, 0.1582425981760025, -1.742975115776062, -1.2938947677612305, 1.3074569702148438], + [-0.8769122958183289, -0.015859045088291168, -0.6937687397003174, -0.8013211488723755, -0.07756809890270233], + [-0.5014570951461792, -2.2269856929779053, -0.17264695465564728, -0.4442422091960907, 0.2926892936229706] + ] + ], + "expected_output": [ + [1.1716068983078003, 1.0511995553970337, 0.9755558371543884, 0.5409456491470337, 1.28053879737854], + [1.2130928039550781, 1.8218294382095337, 1.5863804817199707, 0.884564220905304, 0.12699493765830994], + [1.2572640180587769, 0.22709836065769196, -0.17264695465564728, 0.9846003651618958, 1.3074569702148438] + ], + "expected_grad": [ + [ + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 1.0] + ], + [ + [1.0, 0.0, 0.0, 1.0, 1.0], + [0.0, 1.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 1.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "max_dim_1_keepdim_True", + "op_name": "max", + "dim": 1, + "keepdim": true, + "input": [ + [ + [0.05868193879723549, 1.5382329225540161, 1.0444575548171997, -0.26301294565200806, 0.2191448211669922], + [0.05122251436114311, -0.6351413130760193, -0.9322060346603394, -0.2185828685760498, 0.4121098220348358], + [-1.132474660873413, -2.3891191482543945, 0.7177993655204773, -1.5831094980239868, 0.6412859559059143], + [-0.2708216905593872, -0.6109879612922668, 0.11032737791538239, 0.12355764210224152, -1.4388847351074219] + ], + [ + 
[-0.45935776829719543, 0.719353199005127, -1.2023998498916626, -0.5769718289375305, 0.7339244484901428], + [0.0939386859536171, 1.0834808349609375, 0.8089823126792908, -0.9773237109184265, -0.26083904504776], + [-0.07018473744392395, -1.206803798675537, 1.5053801536560059, -0.0004540873342193663, -0.8399935364723206], + [-0.9963456988334656, 1.9695827960968018, -0.6241140961647034, -0.11510364711284637, 1.3116685152053833] + ], + [ + [0.9128003120422363, -0.8139405250549316, -0.32805025577545166, -1.6033560037612915, -0.6016383171081543], + [-0.3473812937736511, -0.9625334739685059, 0.9539335370063782, -1.4123497009277344, 0.812851071357727], + [1.4345933198928833, 0.05774686485528946, 0.8243234157562256, 0.3210548460483551, -0.6046251654624939], + [-0.6875012516975403, 0.20559696853160858, -0.7192203998565674, -1.1452873945236206, 0.888896107673645] + ] + ], + "expected_output": [ + [ + [0.05868193879723549, 1.5382329225540161, 1.0444575548171997, 0.12355764210224152, 0.6412859559059143] + ], + [ + [0.0939386859536171, 1.9695827960968018, 1.5053801536560059, -0.0004540873342193663, 1.3116685152053833] + ], + [ + [1.4345933198928833, 0.20559696853160858, 0.9539335370063782, 0.3210548460483551, 0.888896107673645] + ] + ], + "expected_grad": [ + [ + [1.0, 1.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 1.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 1.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 1.0] + ] + ] + }, + { + "test_name": "max_dim_-1_keepdim_False", + "op_name": "max", + "dim": -1, + "keepdim": false, + "input": [ + [ + [1.102234125137329, 0.1452106386423111, -1.0025994777679443, -0.8691359758377075, 1.034902811050415], + [1.141363263130188, -0.6113547086715698, 0.5669911503791809, 0.08074041455984116, 0.769339919090271], + [-1.206941843032837, 
-0.1684180051088333, -1.0213230848312378, 0.45474377274513245, 0.05628223717212677], + [0.1908469796180725, 1.134475827217102, -0.6270834803581238, -0.2348310351371765, 0.1834784746170044] + ], + [ + [0.8270988464355469, 0.6817458271980286, 0.40630635619163513, 1.706158995628357, -0.3331676423549652], + [1.3781265020370483, 0.22484587132930756, -2.3762154579162598, 0.4015538990497589, -2.294616460800171], + [0.9543997049331665, -0.3883368968963623, -0.6146398186683655, -0.4274226725101471, -1.4890133142471313], + [0.5850116610527039, -0.6405912041664124, -1.9063634872436523, -0.21498170495033264, 0.1672649085521698] + ], + [ + [-0.02165069803595543, 0.7171346545219421, -1.3825470209121704, 0.5667335987091064, 0.1853654533624649], + [1.7064272165298462, 2.270893096923828, 0.6268392205238342, 1.6014127731323242, 1.196624517440796], + [0.07062987983226776, -0.06806328892707825, -0.8778694272041321, -0.666265070438385, 0.34564408659935], + [-0.1972421109676361, -1.5218473672866821, -0.06109791249036789, -0.49620160460472107, 1.5591261386871338] + ] + ], + "expected_output": [ + [1.102234125137329, 1.141363263130188, 0.45474377274513245, 1.134475827217102], + [1.706158995628357, 1.3781265020370483, 0.9543997049331665, 0.5850116610527039], + [0.7171346545219421, 2.270893096923828, 0.34564408659935, 1.5591261386871338] + ], + "expected_grad": [ + [ + [1.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 1.0] + ] + ] + }, + { + "test_name": "max_dim_-1_keepdim_True", + "op_name": "max", + "dim": -1, + "keepdim": true, + "input": [ + [ + [0.08976225554943085, -2.1487770080566406, -0.26685312390327454, 0.06450489908456802, -1.4932622909545898], + [1.6294554471969604, 
-1.738688588142395, 1.7332375049591064, 2.0544910430908203, 1.293807029724121], + [0.8169977068901062, -1.0389111042022705, -1.4858906269073486, 1.4143908023834229, 0.7241055965423584], + [1.134980320930481, -0.2217475324869156, 2.139716625213623, 0.19970077276229858, 0.8841625452041626] + ], + [ + [0.949975848197937, -0.10251139849424362, 0.9830695986747742, -1.6503483057022095, 0.10293407738208771], + [-1.2885284423828125, -0.7290129065513611, -1.3367583751678467, 0.28592362999916077, 1.0034847259521484], + [0.6469754576683044, 1.3000932931900024, -0.634432315826416, 0.09809456765651703, -2.255279064178467], + [-1.0050581693649292, 0.27072352170944214, 0.2964065372943878, -0.010661646723747253, 0.32635602355003357] + ], + [ + [-0.779302179813385, 1.35313880443573, 0.9089425206184387, -1.2521377801895142, -0.1434730738401413], + [0.10889503359794617, 0.09379234164953232, -0.7034569978713989, -0.17944560945034027, 0.45458918809890747], + [-0.26298829913139343, 0.39442673325538635, 0.3479395806789398, -1.4978502988815308, 0.7075753211975098], + [0.693645179271698, -0.07173124700784683, -0.2537860572338104, 0.2973843812942505, -1.541783332824707] + ] + ], + "expected_output": [ + [ + [0.08976225554943085], + [2.0544910430908203], + [1.4143908023834229], + [2.139716625213623] + ], + [ + [0.9830695986747742], + [1.0034847259521484], + [1.3000932931900024], + [0.32635602355003357] + ], + [ + [1.35313880443573], + [0.45458918809890747], + [0.7075753211975098], + [0.693645179271698] + ] + ], + "expected_grad": [ + [ + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0] + ], + [ + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [1.0, 0.0, 0.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "min_dim_None_keepdim_False", + "op_name": "min", + "dim": null, + 
"keepdim": false, + "input": [ + [ + [-1.4054443836212158, 0.007234930992126465, 0.5773534178733826, 0.30951234698295593, -0.8588898777961731], + [0.526231050491333, -0.06828558444976807, 1.2680625915527344, 0.23789522051811218, -1.805619716644287], + [1.746630311012268, -1.526967167854309, 0.3485613167285919, 0.7839732766151428, 1.5388095378875732], + [1.1633633375167847, 0.8717845678329468, 0.21581949293613434, 0.1374770998954773, -1.1221222877502441] + ], + [ + [0.1467854380607605, -0.17452457547187805, -0.7465326189994812, -1.163095235824585, -1.0377240180969238], + [0.6390634179115295, -0.929384708404541, 0.07479915767908096, 0.641934871673584, 1.1865335702896118], + [-0.7681276798248291, -0.8917787075042725, 0.19665859639644623, 0.6306189298629761, 1.6684050559997559], + [1.258668065071106, 0.16344283521175385, 0.8060418367385864, -0.6885544061660767, -0.23013491928577423] + ], + [ + [1.1331552267074585, 0.42492198944091797, -0.07918443530797958, 0.3510800898075104, 0.45417457818984985], + [0.3879549503326416, -0.6664285659790039, -0.5420844554901123, 2.9302103519439697, 0.03980718180537224], + [-1.2734509706497192, 1.154341697692871, 0.7456277012825012, -0.46407395601272583, 0.354059636592865], + [0.5733898282051086, -1.9253568649291992, 1.3460670709609985, -0.9440423846244812, 0.5850473642349243] + ] + ], + "expected_output": -1.9253568649291992, + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "min_dim_0_keepdim_False", + "op_name": "min", + "dim": 0, + "keepdim": false, + "input": [ + [ + [-0.7561046481132507, -0.6087163090705872, 0.06523545831441879, 0.2073671668767929, 
-0.4843977093696594], + [-0.4931473731994629, 0.695488691329956, 1.5209262371063232, -1.5004045963287354, 1.424126148223877], + [0.4264316260814667, 1.725341796875, -1.1767886877059937, -0.5571966767311096, 2.1753745079040527], + [-0.4421183466911316, -1.742424488067627, -0.7672881484031677, 1.4304224252700806, 0.39234694838523865] + ], + [ + [-1.3970006704330444, -2.0020906925201416, -0.26867592334747314, -0.701147735118866, 0.31265851855278015], + [-0.3883015811443329, 0.8928799033164978, 0.7249560952186584, 0.4335167407989502, 0.9744104146957397], + [-0.21621425449848175, -0.29577603936195374, -0.2591007351875305, 0.7960927486419678, 0.5791953802108765], + [-0.13450507819652557, -1.3556065559387207, -0.394126832485199, -0.7559952735900879, 0.5876944661140442] + ], + [ + [-1.3335460424423218, 0.9432480931282043, -1.1379897594451904, -1.1207023859024048, -0.6139907240867615], + [0.9939887523651123, 0.5297332406044006, -0.3778044879436493, 1.417391061782837, 0.4910713732242584], + [0.9159647822380066, 0.1735081523656845, -1.438053846359253, 0.6212812662124634, -1.969196081161499], + [-0.2445336878299713, -0.6776713728904724, 0.47822806239128113, -1.8905819654464722, 0.2990115284919739] + ] + ], + "expected_output": [ + [-1.3970006704330444, -2.0020906925201416, -1.1379897594451904, -1.1207023859024048, -0.6139907240867615], + [-0.4931473731994629, 0.5297332406044006, -0.3778044879436493, -1.5004045963287354, 0.4910713732242584], + [-0.21621425449848175, -0.29577603936195374, -1.438053846359253, -0.5571966767311096, -1.969196081161499], + [-0.4421183466911316, -1.742424488067627, -0.7672881484031677, -1.8905819654464722, 0.2990115284919739] + ], + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [1.0, 1.0, 1.0, 0.0, 0.0] + ], + [ + [1.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 1.0, 1.0, 1.0], + [0.0, 1.0, 1.0, 0.0, 
1.0], + [0.0, 0.0, 1.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "min_dim_0_keepdim_True", + "op_name": "min", + "dim": 0, + "keepdim": true, + "input": [ + [ + [0.4874853193759918, -1.5288264751434326, 0.3616181015968323, 0.4444722533226013, 0.37175261974334717], + [0.17289385199546814, -1.2141680717468262, 1.9441745281219482, -0.5341972708702087, -0.07878366857767105], + [0.9717959761619568, 0.15757296979427338, -0.1293685883283615, 0.9459248185157776, 0.7486283779144287], + [1.3489890098571777, -0.11803387850522995, 0.8932477235794067, 0.8591998815536499, 1.068840742111206] + ], + [ + [1.035352349281311, -1.0888819694519043, -0.9833644032478333, 0.18595486879348755, -0.5669856071472168], + [-1.0098682641983032, -0.4541301131248474, 0.6979333758354187, 0.546902596950531, 0.5524624586105347], + [0.2402925342321396, -1.1295276880264282, 2.641334295272827, -0.42235150933265686, 0.3279983699321747], + [-0.22356857359409332, 0.6338478326797485, 1.2712687253952026, 0.5257611274719238, -0.08859143406152725] + ], + [ + [0.2868587076663971, 1.34219491481781, -0.4532349705696106, 1.0591752529144287, -0.007002960424870253], + [1.0370234251022339, 0.12661591172218323, 0.9138452410697937, 0.15242700278759003, -1.77699875831604], + [-0.2700897455215454, 1.132546067237854, -0.4591978192329407, 0.7583627104759216, -0.25366878509521484], + [-0.6026967763900757, 0.41119346022605896, -0.4785611629486084, -0.3798253536224365, -0.6784350275993347] + ] + ], + "expected_output": [ + [ + [0.2868587076663971, -1.5288264751434326, -0.9833644032478333, 0.18595486879348755, -0.5669856071472168], + [-1.0098682641983032, -1.2141680717468262, 0.6979333758354187, -0.5341972708702087, -1.77699875831604], + [-0.2700897455215454, -1.1295276880264282, -0.4591978192329407, -0.42235150933265686, -0.25366878509521484], + [-0.6026967763900757, -0.11803387850522995, -0.4785611629486084, -0.3798253536224365, -0.6784350275993347] + ] + ], + "expected_grad": [ + [ + [0.0, 1.0, 
0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 1.0, 1.0, 1.0], + [1.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [1.0, 0.0, 1.0, 0.0, 1.0], + [1.0, 0.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "min_dim_1_keepdim_False", + "op_name": "min", + "dim": 1, + "keepdim": false, + "input": [ + [ + [1.1780027151107788, 0.3302285671234131, 0.3409285247325897, 0.0966181680560112, 0.8048174381256104], + [-0.9308503866195679, -0.7495615482330322, -1.1345824003219604, 1.743110179901123, 0.24290774762630463], + [0.2789574861526489, 0.3315619230270386, -0.05199375003576279, -0.38996851444244385, 0.45961233973503113], + [1.5001028776168823, 0.4969956576824188, 0.8539602756500244, -1.2495394945144653, -1.8244128227233887] + ], + [ + [1.9353760480880737, 0.32966846227645874, -0.5037038922309875, 0.5824847221374512, -0.04406149685382843], + [0.26977765560150146, -1.149574875831604, 0.5576037764549255, -0.4423902630805969, -1.1938692331314087], + [0.5121238231658936, 0.3450745940208435, 1.2737618684768677, -0.5269590616226196, 0.36447712779045105], + [-1.3918713331222534, -0.13599461317062378, 0.5021628141403198, 0.3100223243236542, 0.08071848005056381] + ], + [ + [0.2132561206817627, -0.21911364793777466, 1.0261167287826538, -0.15927140414714813, 0.12461294233798981], + [1.5179888010025024, 0.2487545758485794, -0.9334361553192139, 1.019239902496338, 0.0731339082121849], + [-0.531711220741272, -0.03542341664433479, -0.5915296077728271, -0.014602077193558216, 0.2609618306159973], + [0.1303926557302475, 1.2057738304138184, 2.191358804702759, -0.9426639676094055, -0.3424133360385895] + ] + ], + "expected_output": [ + [-0.9308503866195679, -0.7495615482330322, -1.1345824003219604, -1.2495394945144653, -1.8244128227233887], + [-1.3918713331222534, -1.149574875831604, -0.5037038922309875, 
-0.5269590616226196, -1.1938692331314087], + [-0.531711220741272, -0.21911364793777466, -0.9334361553192139, -0.9426639676094055, -0.3424133360385895] + ], + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 1.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 1.0] + ], + [ + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "min_dim_1_keepdim_True", + "op_name": "min", + "dim": 1, + "keepdim": true, + "input": [ + [ + [1.2685400247573853, 0.1908181607723236, 0.6423386931419373, 0.5231516361236572, 0.3537774980068207], + [0.14756281673908234, -0.6777421832084656, -0.14288291335105896, 0.918861985206604, 0.15021729469299316], + [-0.7345491647720337, 1.491151213645935, -1.0409657955169678, 0.9087132811546326, 0.3067242503166199], + [2.699211597442627, -1.8668535947799683, 1.0313161611557007, -1.0959745645523071, -1.1971251964569092] + ], + [ + [0.3215359151363373, -0.21750344336032867, 0.8391018509864807, -1.0961977243423462, 1.1836506128311157], + [-1.63896906375885, -0.5325616598129272, -0.04153263941407204, 0.8015749454498291, -0.6946583986282349], + [0.20143039524555206, -0.9889429807662964, 0.20942160487174988, 0.9820282459259033, -1.3028837442398071], + [-0.561603844165802, 0.8338823318481445, -0.26739558577537537, 0.23192813992500305, 1.012840986251831] + ], + [ + [-1.4937485456466675, 0.9061921834945679, -1.4567253589630127, 1.2727082967758179, 1.6362823247909546], + [-0.9645407795906067, 1.1407610177993774, 0.41566601395606995, 2.026231527328491, -1.0126259326934814], + [0.9119246602058411, -0.28909119963645935, -0.39347609877586365, 0.4535541534423828, -1.1785883903503418], + [0.789819061756134, 1.1420683860778809, 0.5569579005241394, 0.6643039584159851, -0.19934415817260742] + ] + ], + "expected_output": [ + [ + 
[-0.7345491647720337, -1.8668535947799683, -1.0409657955169678, -1.0959745645523071, -1.1971251964569092] + ], + [ + [-1.63896906375885, -0.9889429807662964, -0.26739558577537537, -1.0961977243423462, -1.3028837442398071] + ], + [ + [-1.4937485456466675, -0.28909119963645935, -1.4567253589630127, 0.4535541534423828, -1.1785883903503418] + ] + ], + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 1.0] + ], + [ + [0.0, 0.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 1.0, 0.0, 0.0] + ], + [ + [1.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "min_dim_-1_keepdim_False", + "op_name": "min", + "dim": -1, + "keepdim": false, + "input": [ + [ + [-0.9770270586013794, -0.584597110748291, -1.5499167442321777, 0.3021530210971832, -0.34727850556373596], + [-0.20263050496578217, -0.27478426694869995, -0.017359629273414612, -0.6773730516433716, 0.6478610038757324], + [-0.9755827188491821, 1.8391462564468384, -0.0034865080378949642, 0.4103865623474121, 1.752099633216858], + [0.66091388463974, -1.0182898044586182, -1.101527452468872, 0.9167301654815674, 1.1085376739501953] + ], + [ + [-2.0887160301208496, 2.1544744968414307, 0.9984968304634094, -0.013719181530177593, -0.9246350526809692], + [-1.5019725561141968, 0.7051315307617188, 0.37148424983024597, -0.714340090751648, -0.15940190851688385], + [0.34379199147224426, 0.4355435073375702, -0.2341606765985489, 0.18855510652065277, 0.019730882719159126], + [-0.24155908823013306, -1.8511996269226074, 0.2684777081012726, -0.30269020795822144, -0.3692476451396942] + ], + [ + [-0.8595789074897766, 0.2909909188747406, 0.6529440879821777, 2.0979738235473633, 0.43178966641426086], + [-1.228909969329834, -0.06794015318155289, -2.2453958988189697, 1.5783367156982422, -0.5161778330802917], + [-0.19803878664970398, 
0.8383589386940002, 1.144734501838684, -1.3692904710769653, -0.3902406692504883], + [0.950104296207428, -2.147958517074585, -0.06174112856388092, 0.2618931531906128, 0.4153856635093689] + ] + ], + "expected_output": [ + [-1.5499167442321777, -0.6773730516433716, -0.9755827188491821, -1.101527452468872], + [-2.0887160301208496, -1.5019725561141968, -0.2341606765985489, -1.8511996269226074], + [-0.8595789074897766, -2.2453958988189697, -1.3692904710769653, -2.147958517074585] + ], + "expected_grad": [ + [ + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0] + ], + [ + [1.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0] + ], + [ + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "min_dim_-1_keepdim_True", + "op_name": "min", + "dim": -1, + "keepdim": true, + "input": [ + [ + [0.5631878972053528, 0.19998317956924438, 0.8093208074569702, -0.5293470621109009, 0.448284387588501], + [-0.8013262152671814, 0.6317405700683594, 1.3970056772232056, -1.3542675971984863, 0.13657432794570923], + [0.8511560559272766, 1.1925286054611206, 0.5439687371253967, -1.2077754735946655, 1.1914323568344116], + [1.2834670543670654, 0.8253968954086304, 1.7004607915878296, 0.9272356629371643, 0.026323553174734116] + ], + [ + [0.7056452035903931, 1.0615094900131226, -1.6115124225616455, 0.45299890637397766, -1.1804884672164917], + [0.5873482823371887, -0.1269814372062683, 0.05342012271285057, 1.0922929048538208, -1.3024108409881592], + [1.3521305322647095, -1.1749324798583984, 0.11568229645490646, 0.21853865683078766, 1.1232846975326538], + [-0.7424628138542175, 0.22881479561328888, -0.4537980556488037, 0.797391414642334, 1.4051463603973389] + ], + [ + [-0.5578140616416931, 2.0406908988952637, -2.4602458477020264, 1.2475563287734985, 0.46312710642814636], + [0.3421548902988434, 
-0.1932428628206253, 0.04506998881697655, 0.10006450116634369, -0.3069831430912018], + [-0.7462528347969055, -1.4819915294647217, -0.782849133014679, -0.3068717420101166, 1.822840690612793], + [0.24596092104911804, 0.26649534702301025, -0.518925130367279, -2.0522587299346924, -1.131065011024475] + ] + ], + "expected_output": [ + [ + [-0.5293470621109009], + [-1.3542675971984863], + [-1.2077754735946655], + [0.026323553174734116] + ], + [ + [-1.6115124225616455], + [-1.3024108409881592], + [-1.1749324798583984], + [-0.7424628138542175] + ], + [ + [-2.4602458477020264], + [-0.3069831430912018], + [-1.4819915294647217], + [-2.0522587299346924] + ] + ], + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0] + ], + [ + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0] + ] + ] + } + ], + "linear": [ + { + "test_name": "linear_2D_input", + "in_features": 10, + "out_features": 5, + "input": [ + [1.995280146598816, -0.6346070170402527, 1.0690128803253174, 1.160980463027954, -1.7790907621383667, 0.8086466789245605, -0.1060129776597023, 2.2590320110321045, 0.3341250419616699, -0.27466103434562683], + [-0.29712751507759094, -0.5143458247184753, 1.5861903429031372, 1.3935821056365967, -0.08091596513986588, -0.561027467250824, -0.4829641580581665, 0.4769587814807892, 0.17047974467277527, 0.845003604888916], + [1.797692060470581, 1.4310468435287476, -0.1232481449842453, 0.0116643775254488, 0.3026885390281677, -0.7567965984344482, 0.7330871224403381, -1.003272533416748, 0.2109839767217636, 1.4640601873397827] + ], + "weight": [ + [-0.24328012764453888, 0.028091339394450188, -0.16780336201190948, -0.06218525767326355, -0.04895494133234024, 0.015718938782811165, -0.27727454900741577, 0.2699316143989563, 
-0.1345617175102234, 0.09880602359771729], + [0.2177683264017105, 0.12743021547794342, 0.2214605212211609, 0.17856846749782562, 0.23155508935451508, 0.06674247980117798, 0.2019743025302887, -0.1374560296535492, -0.17039762437343597, 0.15100044012069702], + [0.08297339081764221, -0.011858473531901836, -0.26674091815948486, 0.1033318042755127, 0.20483756065368652, -0.036509934812784195, 0.07265040278434753, 0.14077845215797424, -0.312299907207489, -0.05202847719192505], + [-0.11730943620204926, -0.1616298258304596, 0.19328103959560394, 0.3135358691215515, 0.050457291305065155, 0.19681504368782043, -0.2346036732196808, 0.055711012333631516, -0.04089835286140442, -0.1950366348028183], + [0.06484019756317139, 0.10066336393356323, -0.015273772180080414, 0.134284108877182, 0.15182378888130188, 0.2868318259716034, 0.018331248313188553, 0.011266625486314297, -0.004526200238615274, 0.04364045336842537] + ], + "bias": [-0.2050478756427765, -0.03892422094941139, -0.2674124538898468, -0.22181136906147003, -0.26935896277427673], + "expected_output": [ + [-0.2929801046848297, -0.029548827558755875, -0.4331992268562317, 0.4773373007774353, -0.0924459844827652], + [-0.18168585002422333, 0.31021445989608765, -0.6263059377670288, 0.49329322576522827, -0.3180721402168274], + [-0.9667624235153198, 1.0003747940063477, -0.24155186116695404, -1.339890480041504, -0.11133930087089539] + ], + "expected_grad_input": [ + [0.004992350935935974, 0.08269661664962769, -0.03507646173238754, 0.6675350666046143, 0.5897188186645508, 0.5295983552932739, -0.21892227232456207, 0.34023168683052063, -0.6626837849617004, 0.04638180509209633], + [0.004992350935935974, 0.08269661664962769, -0.03507646173238754, 0.6675350666046143, 0.5897188186645508, 0.5295983552932739, -0.21892227232456207, 0.34023168683052063, -0.6626837849617004, 0.04638180509209633], + [0.004992350935935974, 0.08269661664962769, -0.03507646173238754, 0.6675350666046143, 0.5897188186645508, 0.5295983552932739, -0.21892227232456207, 
0.34023168683052063, -0.6626837849617004, 0.04638180509209633] + ], + "expected_grad_weight": [ + [3.495844841003418, 0.28209400177001953, 2.5319552421569824, 2.5662269592285156, -1.5573182106018066, -0.5091773867607117, 0.14410996437072754, 1.7327182292938232, 0.7155888080596924, 2.034402847290039], + [3.495844841003418, 0.28209400177001953, 2.5319552421569824, 2.5662269592285156, -1.5573182106018066, -0.5091773867607117, 0.14410996437072754, 1.7327182292938232, 0.7155888080596924, 2.034402847290039], + [3.495844841003418, 0.28209400177001953, 2.5319552421569824, 2.5662269592285156, -1.5573182106018066, -0.5091773867607117, 0.14410996437072754, 1.7327182292938232, 0.7155888080596924, 2.034402847290039], + [3.495844841003418, 0.28209400177001953, 2.5319552421569824, 2.5662269592285156, -1.5573182106018066, -0.5091773867607117, 0.14410996437072754, 1.7327182292938232, 0.7155888080596924, 2.034402847290039], + [3.495844841003418, 0.28209400177001953, 2.5319552421569824, 2.5662269592285156, -1.5573182106018066, -0.5091773867607117, 0.14410996437072754, 1.7327182292938232, 0.7155888080596924, 2.034402847290039] + ], + "expected_grad_bias": [3.0, 3.0, 3.0, 3.0, 3.0] + }, + { + "test_name": "linear_3D_batched_input", + "in_features": 4, + "out_features": 2, + "input": [ + [ + [-0.3233233392238617, -0.3046680688858032, -0.22401957213878632, 0.2504769563674927], + [1.7058311700820923, 1.3944706916809082, 1.136904239654541, 1.1797000169754028], + [0.6420789957046509, -3.0800278186798096, -1.3616151809692383, -0.2960043251514435] + ], + [ + [1.1283513307571411, -0.7886322736740112, -0.038704462349414825, 0.6835862398147583], + [0.5444626212120056, 0.454408198595047, 0.9106372594833374, 0.9990993142127991], + [-0.27131080627441406, -0.9536606073379517, -0.6736571192741394, 1.259739637374878] + ] + ], + "weight": [ + [0.13263213634490967, -0.20593488216400146, 0.0932854413986206, 0.43565016984939575], + [0.22836577892303467, 0.4893844723701477, -0.21178466081619263, 
-0.23876512050628662] + ], + "bias": [-0.06348574161529541, -0.10069990158081055], + "expected_output": [ + [ + [0.04459554702043533, -0.33599695563316345], + [0.49558526277542114, 0.44883573055267334], + [0.39998650550842285, -1.1023441553115845] + ], + [ + [0.5427707433700562, -0.3839870095252991], + [0.4353559911251068, -0.18539203703403473], + [0.5828850865364075, -0.7874763607978821] + ] + ], + "expected_grad_input": [ + [ + [0.36099791526794434, 0.28344959020614624, -0.11849921941757202, 0.19688504934310913], + [0.36099791526794434, 0.28344959020614624, -0.11849921941757202, 0.19688504934310913], + [0.36099791526794434, 0.28344959020614624, -0.11849921941757202, 0.19688504934310913] + ], + [ + [0.36099791526794434, 0.28344959020614624, -0.11849921941757202, 0.19688504934310913], + [0.36099791526794434, 0.28344959020614624, -0.11849921941757202, 0.19688504934310913], + [0.36099791526794434, 0.28344959020614624, -0.11849921941757202, 0.19688504934310913] + ] + ], + "expected_grad_weight": [ + [3.4260900020599365, -3.2781100273132324, -0.250454843044281, 4.076597690582275], + [3.4260900020599365, -3.2781100273132324, -0.250454843044281, 4.076597690582275] + ], + "expected_grad_bias": [6.0, 6.0] + } + ], + "optimizers": [ + { + "test_name": "SGD_basic", + "optimizer": "SGD", + "kwargs": { + "lr": 0.1 + }, + "initial_weight": [ + [1.1711158752441406, -0.2590734362602234, -0.3033114969730377], + [0.2349793165922165, 1.0358152389526367, 0.715836763381958], + [-0.7037585973739624, -0.284278929233551, 0.7185912728309631] + ], + "input_x": [ + [1.8401010036468506, 0.04152654483914375, -1.7044862508773804], + [-0.3476502001285553, -0.3426065444946289, 0.11469119042158127], + [-0.4225151538848877, 0.7643097639083862, -0.5040589570999146] + ], + "expected_grad": [ + [1.8401010036468506, 0.04152654483914375, -1.7044862508773804], + [-0.3476502001285553, -0.3426065444946289, 0.11469119042158127], + [-0.4225151538848877, 0.7643097639083862, -0.5040589570999146] + ], + 
"expected_updated_weight": [ + [0.9871057868003845, -0.26322609186172485, -0.1328628659248352], + [0.269744336605072, 1.0700758695602417, 0.7043676376342773], + [-0.6615070700645447, -0.36070990562438965, 0.7689971923828125] + ] + }, + { + "test_name": "SGD_momentum", + "optimizer": "SGD", + "kwargs": { + "lr": 0.1, + "momentum": 0.9 + }, + "initial_weight": [ + [-0.9779782891273499, 1.0851811170578003, 0.24460527300834656], + [0.10595189034938812, 0.9393600821495056, 0.3916529417037964], + [0.4106380343437195, -0.0652524009346962, 0.5183061361312866] + ], + "input_x": [ + [0.05883641913533211, 0.5554778575897217, 1.053452968597412], + [-0.2609460651874542, -1.6987152099609375, -0.08697007596492767], + [2.333068609237671, -0.035302918404340744, 2.392319917678833] + ], + "expected_grad": [ + [0.05883641913533211, 0.5554778575897217, 1.053452968597412], + [-0.2609460651874542, -1.6987152099609375, -0.08697007596492767], + [2.333068609237671, -0.035302918404340744, 2.392319917678833] + ], + "expected_updated_weight": [ + [-0.9838619232177734, 1.0296332836151123, 0.13925997912883759], + [0.13204649090766907, 1.1092315912246704, 0.4003499448299408], + [0.17733116447925568, -0.061722107231616974, 0.27907413244247437] + ] + }, + { + "test_name": "SGD_weight_decay", + "optimizer": "SGD", + "kwargs": { + "lr": 0.1, + "weight_decay": 0.01 + }, + "initial_weight": [ + [0.861420214176178, 1.0063271522521973, 0.9432656168937683], + [0.22092591226100922, -1.1125500202178955, 0.1605069637298584], + [1.8549803495407104, -0.16434448957443237, -0.4784829616546631] + ], + "input_x": [ + [0.38463443517684937, -0.3245149850845337, -1.5829708576202393], + [-0.6101211309432983, 0.2677954137325287, 1.1987781524658203], + [1.122973918914795, 1.4562538862228394, 0.9092239141464233] + ], + "expected_grad": [ + [0.38463443517684937, -0.3245149850845337, -1.5829708576202393], + [-0.6101211309432983, 0.2677954137325287, 1.1987781524658203], + [1.122973918914795, 1.4562538862228394, 
0.9092239141464233] + ], + "expected_updated_weight": [ + [0.8220953345298767, 1.037772297859192, 1.1006194353103638], + [0.28171709179878235, -1.1382169723510742, 0.040468644350767136], + [1.7408279180526733, -0.3098055422306061, -0.5689268708229065] + ] + }, + { + "test_name": "SGD_nesterov", + "optimizer": "SGD", + "kwargs": { + "lr": 0.1, + "momentum": 0.9, + "nesterov": true + }, + "initial_weight": [ + [0.9442354440689087, 2.31821870803833, 0.64822918176651], + [-0.4594375491142273, 0.8558579683303833, 1.3270264863967896], + [0.9446682333946228, -0.30597713589668274, 0.44008857011795044] + ], + "input_x": [ + [-0.17014701664447784, 0.12107481062412262, 0.10815810412168503], + [0.5580024719238281, -1.8953827619552612, 0.12464731931686401], + [-0.3046552538871765, -0.7077358961105347, -1.7160437107086182] + ], + "expected_grad": [ + [-0.17014701664447784, 0.12107481062412262, 0.10815810412168503], + [0.5580024719238281, -1.8953827619552612, 0.12464731931686401], + [-0.3046552538871765, -0.7077358961105347, -1.7160437107086182] + ], + "expected_updated_weight": [ + [0.9765633940696716, 2.2952144145965576, 0.627679169178009], + [-0.5654579997062683, 1.2159806489944458, 1.3033435344696045], + [1.0025527477264404, -0.1715073138475418, 0.766136884689331] + ] + }, + { + "test_name": "Adam_basic", + "optimizer": "Adam", + "kwargs": { + "lr": 0.1 + }, + "initial_weight": [ + [-0.5965784788131714, -0.6017999053001404, 0.8372399806976318], + [-0.5020561218261719, -0.10574329644441605, -0.16421273350715637], + [-0.7310932874679565, 0.3742406666278839, 0.07989589124917984] + ], + "input_x": [ + [0.8226869702339172, 0.528905987739563, 0.3527827858924866], + [-0.4038589298725128, 0.4434446096420288, -0.6249487400054932], + [-1.4255669116973877, 0.8732106685638428, -2.0518009662628174] + ], + "expected_grad": [ + [0.8226869702339172, 0.528905987739563, 0.3527827858924866], + [-0.4038589298725128, 0.4434446096420288, -0.6249487400054932], + [-1.4255669116973877, 
0.8732106685638428, -2.0518009662628174] + ], + "expected_updated_weight": [ + [-0.6965785026550293, -0.7017998695373535, 0.7372399568557739], + [-0.40205612778663635, -0.20574328303337097, -0.06421273946762085], + [-0.6310932636260986, 0.2742406725883484, 0.17989589273929596] + ] + }, + { + "test_name": "Adam_custom_betas", + "optimizer": "Adam", + "kwargs": { + "lr": 0.1, + "betas": [0.95, 0.999] + }, + "initial_weight": [ + [-0.9301124811172485, 0.7590786218643188, -0.012546157464385033], + [-0.16888298094272614, -0.7453446984291077, -1.6509697437286377], + [0.06193928048014641, -1.7777185440063477, -0.5515789985656738] + ], + "input_x": [ + [0.45636066794395447, -0.9359204769134521, 0.031871069222688675], + [0.12983688712120056, -0.07866957783699036, -0.38718998432159424], + [0.43904364109039307, -0.7205687761306763, -0.5494226813316345] + ], + "expected_grad": [ + [0.45636066794395447, -0.9359204769134521, 0.031871069222688675], + [0.12983688712120056, -0.07866957783699036, -0.38718998432159424], + [0.43904364109039307, -0.7205687761306763, -0.5494226813316345] + ], + "expected_updated_weight": [ + [-1.0301125049591064, 0.8590786457061768, -0.11254612356424332], + [-0.26888298988342285, -0.6453447341918945, -1.5509697198867798], + [-0.038060713559389114, -1.6777185201644897, -0.4515790045261383] + ] + }, + { + "test_name": "Adam_weight_decay", + "optimizer": "Adam", + "kwargs": { + "lr": 0.1, + "weight_decay": 0.01 + }, + "initial_weight": [ + [1.25302255153656, -1.1181309223175049, 0.7164333462715149], + [-0.858248770236969, 0.20800915360450745, 0.880598783493042], + [1.2542186975479126, 0.13348865509033203, 0.39146795868873596] + ], + "input_x": [ + [0.9119104146957397, -0.17490753531455994, -0.37665173411369324], + [-0.9659009575843811, 0.9704499244689941, 0.2379537969827652], + [-0.7329998016357422, -0.7835293412208557, 0.41817715764045715] + ], + "expected_grad": [ + [0.9119104146957397, -0.17490753531455994, -0.37665173411369324], + [-0.9659009575843811, 
0.9704499244689941, 0.2379537969827652], + [-0.7329998016357422, -0.7835293412208557, 0.41817715764045715] + ], + "expected_updated_weight": [ + [1.1530225276947021, -1.018130898475647, 0.8164333701133728], + [-0.7582488059997559, 0.10800916701555252, 0.7805988192558289], + [1.3542187213897705, 0.23348864912986755, 0.29146796464920044] + ] + }, + { + "test_name": "Adam_amsgrad", + "optimizer": "Adam", + "kwargs": { + "lr": 0.1, + "amsgrad": true + }, + "initial_weight": [ + [-0.9935426712036133, 0.88087397813797, -0.38332679867744446], + [0.5941327810287476, -0.8920964002609253, 0.6283209323883057], + [0.6961200833320618, -2.5605015754699707, -0.30756813287734985] + ], + "input_x": [ + [1.6817193031311035, -1.6232244968414307, 1.4739271402359009], + [-0.3031364381313324, -0.02838980406522751, 0.1726396530866623], + [-0.021278811618685722, -0.6296552419662476, -0.17740698158740997] + ], + "expected_grad": [ + [1.6817193031311035, -1.6232244968414307, 1.4739271402359009], + [-0.3031364381313324, -0.02838980406522751, 0.1726396530866623], + [-0.021278811618685722, -0.6296552419662476, -0.17740698158740997] + ], + "expected_updated_weight": [ + [-1.0935426950454712, 0.9808739423751831, -0.48332679271698], + [0.6941328048706055, -0.7920964360237122, 0.5283209681510925], + [0.7961200475692749, -2.4605016708374023, -0.20756813883781433] + ] + }, + { + "test_name": "Adagrad_basic", + "optimizer": "Adagrad", + "kwargs": { + "lr": 0.1 + }, + "initial_weight": [ + [-0.030310818925499916, -1.4054933786392212, -0.9484650492668152], + [1.147447109222412, -1.0169785022735596, -0.5370362401008606], + [-0.0900096744298935, -1.1404632329940796, 0.6373375058174133] + ], + "input_x": [ + [0.02195807546377182, 1.4306976795196533, 0.40245044231414795], + [-0.5587246417999268, -1.8270106315612793, 0.11520372331142426], + [-0.9051556587219238, -0.9027037024497986, 0.8424926400184631] + ], + "expected_grad": [ + [0.02195807546377182, 1.4306976795196533, 0.40245044231414795], + 
[-0.5587246417999268, -1.8270106315612793, 0.11520372331142426], + [-0.9051556587219238, -0.9027037024497986, 0.8424926400184631] + ], + "expected_updated_weight": [ + [-0.1303108185529709, -1.505493402481079, -1.0484650135040283], + [1.24744713306427, -0.9169784784317017, -0.6370362639427185], + [0.009990327060222626, -1.0404632091522217, 0.5373374819755554] + ] + }, + { + "test_name": "Adagrad_lr_decay", + "optimizer": "Adagrad", + "kwargs": { + "lr": 0.1, + "lr_decay": 0.01 + }, + "initial_weight": [ + [-1.0972191095352173, 0.6440517902374268, 1.843495488166809], + [1.0326179265975952, -1.7598493099212646, 0.8623669743537903], + [0.5527029633522034, -0.4441142678260803, 1.171051263809204] + ], + "input_x": [ + [-0.4860379993915558, -0.8921645879745483, 0.8574724197387695], + [1.7369801998138428, -0.05432219058275223, -0.1720769703388214], + [0.5873350501060486, -0.3464932143688202, -0.39603132009506226] + ], + "expected_grad": [ + [-0.4860379993915558, -0.8921645879745483, 0.8574724197387695], + [1.7369801998138428, -0.05432219058275223, -0.1720769703388214], + [0.5873350501060486, -0.3464932143688202, -0.39603132009506226] + ], + "expected_updated_weight": [ + [-0.9972190856933594, 0.7440518140792847, 1.7434954643249512], + [0.9326179027557373, -1.6598492860794067, 0.9623669981956482], + [0.45270296931266785, -0.3441142439842224, 1.271051287651062] + ] + }, + { + "test_name": "Adagrad_weight_decay", + "optimizer": "Adagrad", + "kwargs": { + "lr": 0.1, + "weight_decay": 0.01 + }, + "initial_weight": [ + [-0.17303436994552612, 0.5430752038955688, -0.08936239033937454], + [-0.06266004592180252, -0.39158639311790466, 0.6495233178138733], + [-2.4070839881896973, 0.14364595711231232, -0.31813719868659973] + ], + "input_x": [ + [-0.04361145943403244, 0.1876208633184433, 1.5964795351028442], + [0.997294008731842, 0.28074535727500916, -0.35832399129867554], + [-0.6476782560348511, 0.7870728969573975, 0.7953746318817139] + ], + "expected_grad": [ + [-0.04361145943403244, 
0.1876208633184433, 1.5964795351028442], + [0.997294008731842, 0.28074535727500916, -0.35832399129867554], + [-0.6476782560348511, 0.7870728969573975, 0.7953746318817139] + ], + "expected_updated_weight": [ + [-0.0730343610048294, 0.4430752098560333, -0.18936239182949066], + [-0.16266004741191864, -0.4915863871574402, 0.7495233416557312], + [-2.307084083557129, 0.0436459556221962, -0.41813719272613525] + ] + } + ], + "expand": [ + { + "test_name": "expand_1D_expand", + "input": [0.2794770896434784], + "expand_shape": [3], + "expected_output": [0.2794770896434784, 0.2794770896434784, 0.2794770896434784], + "expected_grad": [3.0] + }, + { + "test_name": "expand_prepend_2D", + "input": [0.6439860463142395, -0.24545544385910034, 1.243915319442749], + "expand_shape": [2, 3], + "expected_output": [ + [0.6439860463142395, -0.24545544385910034, 1.243915319442749], + [0.6439860463142395, -0.24545544385910034, 1.243915319442749] + ], + "expected_grad": [2.0, 2.0, 2.0] + }, + { + "test_name": "expand_expand_dim_0", + "input": [ + [-0.47988271713256836, -0.3970215618610382, -1.1943094730377197] + ], + "expand_shape": [4, 3], + "expected_output": [ + [-0.47988271713256836, -0.3970215618610382, -1.1943094730377197], + [-0.47988271713256836, -0.3970215618610382, -1.1943094730377197], + [-0.47988271713256836, -0.3970215618610382, -1.1943094730377197], + [-0.47988271713256836, -0.3970215618610382, -1.1943094730377197] + ], + "expected_grad": [ + [4.0, 4.0, 4.0] + ] + }, + { + "test_name": "expand_expand_middle_dim", + "input": [ + [ + [1.8904485702514648, 0.2907778024673462, 0.15576353669166565, -1.0259407758712769] + ], + [ + [-0.6083232760429382, 0.5305862426757812, 0.10105987638235092, 0.7012163400650024] + ] + ], + "expand_shape": [2, 5, 4], + "expected_output": [ + [ + [1.8904485702514648, 0.2907778024673462, 0.15576353669166565, -1.0259407758712769], + [1.8904485702514648, 0.2907778024673462, 0.15576353669166565, -1.0259407758712769], + [1.8904485702514648, 
0.2907778024673462, 0.15576353669166565, -1.0259407758712769], + [1.8904485702514648, 0.2907778024673462, 0.15576353669166565, -1.0259407758712769], + [1.8904485702514648, 0.2907778024673462, 0.15576353669166565, -1.0259407758712769] + ], + [ + [-0.6083232760429382, 0.5305862426757812, 0.10105987638235092, 0.7012163400650024], + [-0.6083232760429382, 0.5305862426757812, 0.10105987638235092, 0.7012163400650024], + [-0.6083232760429382, 0.5305862426757812, 0.10105987638235092, 0.7012163400650024], + [-0.6083232760429382, 0.5305862426757812, 0.10105987638235092, 0.7012163400650024], + [-0.6083232760429382, 0.5305862426757812, 0.10105987638235092, 0.7012163400650024] + ] + ], + "expected_grad": [ + [ + [5.0, 5.0, 5.0, 5.0] + ], + [ + [5.0, 5.0, 5.0, 5.0] + ] + ] + }, + { + "test_name": "expand_preserve_with_negative_one", + "input": [ + [ + [-0.694018542766571], + [1.0377171039581299], + [0.6665804386138916] + ] + ], + "expand_shape": [2, -1, 4], + "expected_output": [ + [ + [-0.694018542766571, -0.694018542766571, -0.694018542766571, -0.694018542766571], + [1.0377171039581299, 1.0377171039581299, 1.0377171039581299, 1.0377171039581299], + [0.6665804386138916, 0.6665804386138916, 0.6665804386138916, 0.6665804386138916] + ], + [ + [-0.694018542766571, -0.694018542766571, -0.694018542766571, -0.694018542766571], + [1.0377171039581299, 1.0377171039581299, 1.0377171039581299, 1.0377171039581299], + [0.6665804386138916, 0.6665804386138916, 0.6665804386138916, 0.6665804386138916] + ] + ], + "expected_grad": [ + [ + [8.0], + [8.0], + [8.0] + ] + ] + } + ], + "conv": [ + { + "test_name": "conv1d_basic", + "conv_type": "Conv1d", + "in_channels": 1, + "out_channels": 1, + "kernel_size": 3, + "stride": 1, + "padding": 0, + "dilation": 1, + "groups": 1, + "has_bias": true, + "input": [ + [ + [-0.6090698838233948, 0.3285716772079468, -0.37654128670692444, -1.1339877843856812, -1.655726432800293] + ] + ], + "weight": [ + [ + [-0.11234099417924881, -0.17958854138851166, 
0.16389460861682892] + ] + ], + "expected_output": [ + [ + [-0.4305786192417145, -0.5334253907203674, -0.4036937355995178] + ] + ], + "expected_grad_input": [ + [ + [-0.11234099417924881, -0.29192954301834106, -0.12803493440151215, -0.01569393277168274, 0.16389460861682892] + ] + ], + "expected_grad_weight": [ + [ + [-0.6570395231246948, -1.1819573640823364, -3.166255474090576] + ] + ], + "bias": [-0.3782813251018524], + "expected_grad_bias": [3.0] + }, + { + "test_name": "conv1d_stride_padding", + "conv_type": "Conv1d", + "in_channels": 2, + "out_channels": 3, + "kernel_size": 2, + "stride": 2, + "padding": 1, + "dilation": 1, + "groups": 1, + "has_bias": true, + "input": [ + [ + [-0.21146954596042633, 0.3081285357475281, 0.07774383574724197, 1.655027985572815, 0.5495442748069763, 0.1979023516178131], + [-0.33105704188346863, -1.3217003345489502, -0.024098770692944527, -0.11441737413406372, -0.2840602993965149, -0.10638472437858582] + ], + [ + [0.2790530323982239, -2.164227247238159, 0.7445889115333557, -2.1047589778900146, 0.5137941241264343, 0.3945278227329254], + [0.5612776279449463, -0.006535662803798914, 0.34217989444732666, -1.073784351348877, 1.5708034038543701, -1.1401112079620361] + ] + ], + "weight": [ + [ + [0.12821322679519653, 0.2354688048362732], + [-0.3317875862121582, 0.3078662157058716] + ], + [ + [-0.4692623019218445, -0.3428391218185425], + [-0.0340040922164917, -0.48575347661972046] + ], + [ + [-0.47526443004608154, -0.4438662528991699], + [0.40199440717697144, 0.10967749357223511] + ] + ], + "expected_output": [ + [ + [-0.3112514019012451, 0.3293813169002533, 0.1325710564851761, -0.09886481612920761], + [0.2127627283334732, -0.1351468712091446, -0.8437230587005615, -0.10980001091957092], + [-0.37825462222099304, -1.1507189273834229, -1.5434595346450806, -0.572631299495697] + ], + [ + [0.07897105813026428, -0.15417668223381042, 0.5314545035362244, 0.26932278275489807], + [-0.388862282037735, 0.5737738013267517, 0.06447575986385345, 
-0.16691800951957703], + [-0.4981119930744171, 0.29717516899108887, 0.07707838714122772, -1.0816327333450317] + ] + ], + "expected_grad_input": [ + [ + [-0.5512365698814392, -0.8163135051727295, -0.5512365698814392, -0.8163135051727295, -0.5512365698814392, -0.8163135051727295], + [-0.06820976734161377, 0.03620272874832153, -0.06820976734161377, 0.03620272874832153, -0.06820976734161377, 0.03620272874832153] + ], + [ + [-0.5512365698814392, -0.8163135051727295, -0.5512365698814392, -0.8163135051727295, -0.5512365698814392, -0.8163135051727295], + [-0.06820976734161377, 0.03620272874832153, -0.06820976734161377, 0.03620272874832153, -0.06820976734161377, 0.03620272874832153] + ] + ], + "expected_grad_weight": [ + [ + [-1.7133994102478027, 1.9532545804977417], + [-3.7629337310791016, 1.8350446224212646] + ], + [ + [-1.7133994102478027, 1.9532545804977417], + [-3.7629337310791016, 1.8350446224212646] + ], + [ + [-1.7133994102478027, 1.9532545804977417], + [-3.7629337310791016, 1.8350446224212646] + ] + ], + "bias": [-0.15953564643859863, -0.020549416542053223, -0.43580931425094604], + "expected_grad_bias": [8.0, 8.0, 8.0] + }, + { + "test_name": "conv1d_dilation_no_bias", + "conv_type": "Conv1d", + "in_channels": 2, + "out_channels": 2, + "kernel_size": 2, + "stride": 1, + "padding": 0, + "dilation": 2, + "groups": 1, + "has_bias": false, + "input": [ + [ + [-0.1598356068134308, -1.1955006122589111, -0.027892930433154106, 0.09312225133180618, -1.241369605064392], + [-0.770350456237793, -2.199479103088379, 0.4829457402229309, -0.7529877424240112, 1.244852900505066] + ] + ], + "weight": [ + [ + [0.45799875259399414, 0.08444952964782715], + [0.3647220730781555, -0.3561045527458191] + ], + [ + [-0.07056188583374023, -0.30873924493789673], + [-0.0554734468460083, -0.19889843463897705] + ] + ], + "expected_output": [ + [ + [-0.5285030603408813, -1.0737299919128418, -0.38476482033729553], + [-0.0334332175552845, 0.3273870348930359, 0.11083772778511047] + ] + ], + 
"expected_grad_input": [ + [ + [0.3874368667602539, 0.3874368667602539, 0.16314715147018433, -0.22428971529006958, -0.22428971529006958], + [0.3092486262321472, 0.3092486262321472, -0.24575436115264893, -0.5550029873847961, -0.5550029873847961] + ] + ], + "expected_grad_weight": [ + [ + [-1.38322913646698, -1.176140308380127], + [-2.4868838787078857, 0.9748108983039856] + ], + [ + [-1.38322913646698, -1.176140308380127], + [-2.4868838787078857, 0.9748108983039856] + ] + ], + "bias": null, + "expected_grad_bias": null + }, + { + "test_name": "conv1d_groups", + "conv_type": "Conv1d", + "in_channels": 4, + "out_channels": 4, + "kernel_size": 3, + "stride": 1, + "padding": 1, + "dilation": 1, + "groups": 2, + "has_bias": true, + "input": [ + [ + [-1.1388487815856934, 0.601990818977356, 0.8349303603172302, -0.1024642139673233, 0.2129386067390442, 0.8061029314994812], + [0.7021245956420898, -0.6485925316810608, -0.4390734136104584, 0.4836112856864929, 0.9263717532157898, 0.06930986046791077], + [0.6712534427642822, -0.02775866538286209, -0.9041587114334106, 0.8335179090499878, 0.7912061810493469, 1.1138908863067627], + [1.4326399564743042, 0.34999290108680725, 0.6649242639541626, -1.9941226243972778, -1.0177124738693237, 1.7738550901412964] + ] + ], + "weight": [ + [ + [-0.1318664252758026, -0.3887295722961426, -0.06299611181020737], + [0.2586928904056549, -0.38582274317741394, -0.03153581917285919] + ], + [ + [0.2975863814353943, -0.11652006953954697, -0.1508348137140274], + [-0.12314062565565109, -0.16540469229221344, 0.048861708492040634] + ], + [ + [-0.39306309819221497, 0.1560068577528, -0.10310549288988113], + [-0.05197756364941597, -0.027544153854250908, 0.22806160151958466] + ], + [ + [-0.253589391708374, -0.37149932980537415, -0.29530608654022217], + [-0.29948708415031433, -0.19876323640346527, 0.32244086265563965] + ] + ], + "expected_output": [ + [ + [-0.1614876687526703, -0.00653722882270813, -0.7269495725631714, -0.7288970947265625, -0.670366644859314, 
-0.44435811042785645], + [-0.3459082245826721, -0.7755998969078064, 0.03345651924610138, 0.007645025849342346, -0.6262635588645935, -0.3960774540901184], + [-0.18486422300338745, -0.440218985080719, -1.0401792526245117, -0.14069314301013947, -0.11561539769172668, -0.4659859538078308], + [-0.6181658506393433, -0.3822198212146759, -0.9882655143737793, -0.650033175945282, 0.33212974667549133, -0.8673254251480103] + ] + ], + "expected_grad_input": [ + [ + [-0.3395296633243561, -0.5533605813980103, -0.5533605813980103, -0.5533605813980103, -0.5533605813980103, -0.7190805673599243], + [-0.41567519307136536, -0.3983493149280548, -0.3983493149280548, -0.3983493149280548, -0.3983493149280548, -0.533901572227478], + [-0.862144947052002, -1.2605564594268799, -1.2605564594268799, -1.2605564594268799, -1.2605564594268799, -0.6139040589332581], + [-0.5777720212936401, -0.02726954221725464, -0.02726954221725464, -0.02726954221725464, -0.02726954221725464, 0.32419508695602417] + ] + ], + "expected_grad_weight": [ + [ + [0.4085468053817749, 1.2146496772766113, 2.3534984588623047], + [1.0244417190551758, 1.0937515497207642, 0.3916269540786743] + ], + [ + [0.4085468053817749, 1.2146496772766113, 2.3534984588623047], + [1.0244417190551758, 1.0937515497207642, 0.3916269540786743] + ], + [ + [1.3640601634979248, 2.4779510498046875, 1.8066976070404053], + [-0.5642780065536499, 1.2095770835876465, -0.22306287288665771] + ], + [ + [1.3640601634979248, 2.4779510498046875, 1.8066976070404053], + [-0.5642780065536499, 1.2095770835876465, -0.22306287288665771] + ] + ], + "bias": [-0.31582707166671753, -0.23997975885868073, -0.3328055143356323, -0.20508882403373718], + "expected_grad_bias": [6.0, 6.0, 6.0, 6.0] + }, + { + "test_name": "conv2d_basic", + "conv_type": "Conv2d", + "in_channels": 1, + "out_channels": 1, + "kernel_size": 3, + "stride": 1, + "padding": 0, + "dilation": 1, + "groups": 1, + "has_bias": true, + "input": [ + [ + [ + [1.1431289911270142, -0.35668206214904785, 
1.0129450559616089, -0.07218152284622192, 0.03116939589381218], + [-0.755984902381897, -0.7956190705299377, -0.6868169903755188, -1.4755696058273315, -0.7580298185348511], + [-1.0949782133102417, 0.5647533535957336, -0.7126783132553101, 2.1329596042633057, -0.21151213347911835], + [0.760424017906189, 0.5244688391685486, 0.8789023160934448, -0.2014247626066208, 0.5469425320625305], + [-0.8171257972717285, 1.0141600370407104, 0.7154613733291626, -1.0755102634429932, -0.1033744290471077] + ] + ] + ], + "weight": [ + [ + [ + [0.3055322766304016, 0.09489969909191132, 0.12405240535736084], + [-0.14045104384422302, 0.23077929019927979, -0.23512959480285645], + [0.24297568202018738, -0.18113800883293152, 0.14157867431640625] + ] + ] + ], + "expected_output": [ + [ + [ + [-0.06470987945795059, 0.7260965704917908, -0.4693846106529236], + [0.15362200140953064, -1.4175024032592773, 0.4050457775592804], + [-0.9633260369300842, 0.3900090456008911, -0.10666545480489731] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [0.3055322766304016, 0.4004319906234741, 0.524484395980835, 0.21895210444927216, 0.12405240535736084], + [0.1650812327861786, 0.4907602369785309, 0.3796830177307129, 0.2146018147468567, -0.1110771894454956], + [0.40805691480636597, 0.5525978803634644, 0.583099365234375, 0.17504248023033142, 0.030501484870910645], + [0.10252463817596436, 0.15216591954231262, 0.05861499905586243, -0.04390963912010193, -0.0935509204864502], + [0.24297568202018738, 0.06183767318725586, 0.2034163475036621, -0.03955933451652527, 0.14157867431640625] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [-1.6819322109222412, -0.3888895511627197, -0.7397143244743347], + [-1.3175290822982788, 0.2289753556251526, -0.4872271418571472], + [1.8333874940872192, 3.841092109680176, 1.9697659015655518] + ] + ] + ], + "bias": [-0.12058798968791962], + "expected_grad_bias": [9.0] + }, + { + "test_name": "conv2d_stride_padding", + "conv_type": "Conv2d", + "in_channels": 2, + "out_channels": 3, + 
"kernel_size": 2, + "stride": 2, + "padding": 1, + "dilation": 1, + "groups": 1, + "has_bias": true, + "input": [ + [ + [ + [-1.7723859548568726, 0.0483609139919281, 0.20343580842018127, -0.6890958547592163, -0.5412123203277588, 2.0465071201324463], + [1.8344781398773193, -2.1705057621002197, 0.0502433180809021, 0.4375612735748291, 1.3833050727844238, -1.3894257545471191], + [0.3897654116153717, -1.5383917093276978, -1.0273215770721436, -0.536761462688446, 0.04652906581759453, -0.3498983681201935], + [-0.8770002722740173, -0.38342466950416565, -0.05906866118311882, 0.7605865001678467, 0.3754201829433441, -0.3296268582344055], + [-0.43347686529159546, -1.508413314819336, 2.2633509635925293, 0.88945072889328, 0.7355136275291443, -0.6224141120910645], + [-1.2468438148498535, -0.40940701961517334, 0.4983586370944977, -1.3945032358169556, -2.804393768310547, 0.4811320900917053] + ], + [ + [0.3296896517276764, 1.4936070442199707, 0.7378533482551575, -0.7039150595664978, 1.024785041809082, 0.12853768467903137], + [-0.46378275752067566, 1.3842717409133911, -0.6843701601028442, -0.3655944764614105, 0.41646215319633484, -0.9060968160629272], + [-0.48222219944000244, -0.9596375823020935, 0.88399738073349, -1.1032230854034424, 0.4613407552242279, -0.6266279220581055], + [1.1519485712051392, -0.7845342755317688, -0.3214779198169708, -0.32065340876579285, -1.381635308265686, -0.36659157276153564], + [-1.3767681121826172, -0.6575090289115906, -2.4963321685791016, -0.03116716258227825, 1.4799479246139526, 0.4248008131980896], + [-0.30355918407440186, 0.07046826928853989, -1.5127924680709839, 1.3581115007400513, 0.23847708106040955, 1.1590290069580078] + ] + ], + [ + [ + [0.08959023654460907, 3.1311562061309814, -0.8544708490371704, -0.8357695937156677, 1.2985942363739014, -0.7110929489135742], + [-0.004657623823732138, -0.9602146744728088, 0.09397149085998535, 0.8745917677879333, -1.226547122001648, 1.3209213018417358], + [1.0341976881027222, -0.49460795521736145, 
1.004910945892334, -0.7041428685188293, 0.2728019058704376, -0.030417079105973244], + [0.08612233400344849, -1.7298319339752197, 0.08109258860349655, 1.2689827680587769, -1.36784029006958, 0.22102324664592743], + [1.3732529878616333, -0.32952919602394104, -0.540728747844696, 0.5175154209136963, -1.1346107721328735, 0.4711856544017792], + [-0.6981925368309021, -0.3024372160434723, 0.5673654675483704, 0.2462792843580246, 2.888267755508423, -0.5556330680847168] + ], + [ + [0.017246929928660393, -0.3958292305469513, 0.4055117070674896, -0.2378912717103958, -2.729581594467163, -0.12834908068180084], + [-0.4433963894844055, -1.7998695373535156, -0.4873202443122864, -0.360196590423584, -0.8537927269935608, 0.004195465240627527], + [-0.6274254322052002, 0.47283434867858887, -0.6854686141014099, 1.4168455600738525, 0.8578910827636719, 0.8829494118690491], + [-0.9081010818481445, -0.725821852684021, 0.03399703651666641, -0.4458200931549072, 0.151767760515213, 1.6231015920639038], + [-0.429679274559021, -0.16159531474113464, 0.30319663882255554, -0.4958910644054413, 0.29410406947135925, -0.3595946729183197], + [-0.13289135694503784, -0.09766664355993271, -1.9935065507888794, 0.11608057469129562, 1.3771235942840576, 1.2624166011810303] + ] + ] + ], + "weight": [ + [ + [ + [-0.0276422630995512, -0.2526994049549103], + [-0.2902931571006775, -0.08688648790121078] + ], + [ + [-0.1804211288690567, 0.28104352951049805], + [-0.2520372271537781, 0.21281561255455017] + ] + ], + [ + [ + [0.0672207698225975, -0.10753397643566132], + [0.2881980836391449, 0.19674864411354065] + ], + [ + [-0.01236567460000515, -0.23963071405887604], + [-0.10405980795621872, 0.1139952540397644] + ] + ], + [ + [ + [0.3063434362411499, -0.1193094253540039], + [-0.14693082869052887, -0.345439612865448] + ], + [ + [0.21730944514274597, 0.3450772762298584], + [0.2434139847755432, -0.1970040202140808] + ] + ] + ], + "expected_output": [ + [ + [ + [0.4774373769760132, 0.0021453690715134144, 0.8958447575569153, 
-0.37320539355278015], + [-0.4771265685558319, 0.8243272304534912, 0.6026365756988525, 0.7146704792976379], + [0.5433085560798645, 0.20568621158599854, -0.19235824048519135, 0.4021473526954651], + [0.48304125666618347, -0.29921460151672363, 0.8224841952323914, 0.03086499124765396] + ], + [ + [-0.4183177053928375, -0.12453565001487732, -0.22219520807266235, 0.4692375957965851], + [-0.17160357534885406, -0.5564679503440857, -0.29994887113571167, -0.22501324117183685], + [-0.5311521887779236, -0.24543151259422302, 0.8116174340248108, -0.3483940660953522], + [0.09963395446538925, 0.17334279417991638, 0.026701416820287704, -0.08917632699012756] + ], + [ + [0.7321466207504272, 0.32566869258880615, 0.09982049465179443, -0.08456266671419144], + [-0.23370768129825592, -0.24824100732803345, -0.07852044701576233, -0.5388195514678955], + [1.1079591512680054, -0.43546727299690247, -0.8573035001754761, 0.19905589520931244], + [0.2288532853126526, -0.5067504644393921, 0.4696616232395172, 0.584104061126709] + ] + ], + [ + [ + [0.24916411936283112, -0.3953704833984375, -0.1378743201494217, 0.4920520782470703], + [-0.09354253858327866, 0.23506797850131989, 0.370263934135437, 0.0023014498874545097], + [-0.23446066677570343, 0.6690057516098022, 0.8228655457496643, -0.0918242335319519], + [0.39236247539520264, -0.4243759512901306, -0.11730509996414185, 0.04087022319436073] + ], + [ + [-0.08759339898824692, 0.7145073413848877, -0.3789612054824829, -0.2987658679485321], + [0.13151949644088745, -0.1149776428937912, 0.09364865720272064, -0.11909044533967972], + [0.3223658800125122, -0.3813372850418091, 0.10539235174655914, 0.06081454083323479], + [-0.0002619511215016246, 0.29038575291633606, -0.7326545715332031, -0.16014693677425385] + ], + [ + [0.15049871802330017, -0.15628866851329803, 0.3388913571834564, 0.2580839693546295], + [-0.20125339925289154, -0.7041428089141846, 0.41130563616752625, 0.8098031878471375], + [-0.5285232067108154, -0.3646079897880554, 0.8295299410820007, 
0.4485067129135132], + [0.2222876250743866, -0.6846349239349365, 0.416132390499115, 0.2889649569988251] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [-0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146], + [-0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046], + [-0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146], + [-0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046], + [-0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146], + [-0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046] + ], + [ + [0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418], + [0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568], + [0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418], + [0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568], + [0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418], + [0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568] + ] + ], + [ + [ + [-0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146], + [-0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046, 
-0.4795427918434143, 0.34592193365097046], + [-0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146], + [-0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046], + [-0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146, -0.23557746410369873, -0.14902590215206146], + [-0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046, -0.4795427918434143, 0.34592193365097046] + ], + [ + [0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418], + [0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568], + [0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418], + [0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568], + [0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418, 0.12980684638023376, -0.11268305778503418], + [0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568, 0.3864901065826416, 0.024522647261619568] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [-4.013932228088379, -0.42591941356658936], + [-1.2463585138320923, 2.407735586166382] + ], + [ + [0.8048295974731445, -6.116849899291992], + [-1.0416556596755981, -2.0319132804870605] + ] + ], + [ + [ + [-4.013932228088379, -0.42591941356658936], + [-1.2463585138320923, 2.407735586166382] + ], + [ + [0.8048295974731445, -6.116849899291992], + [-1.0416556596755981, -2.0319132804870605] + ] + ], + [ + [ + [-4.013932228088379, -0.42591941356658936], + [-1.2463585138320923, 2.407735586166382] + ], + [ + [0.8048295974731445, -6.116849899291992], + [-1.0416556596755981, 
-2.0319132804870605] + ] + ] + ], + "bias": [0.25327789783477783, -0.1071862205862999, 0.18484444916248322], + "expected_grad_bias": [32.0, 32.0, 32.0] + }, + { + "test_name": "conv2d_dilation_no_bias", + "conv_type": "Conv2d", + "in_channels": 2, + "out_channels": 2, + "kernel_size": 2, + "stride": 1, + "padding": 0, + "dilation": 2, + "groups": 1, + "has_bias": false, + "input": [ + [ + [ + [0.8268131613731384, 1.457729697227478, -1.029435634613037, -1.2957319021224976, 0.2842133045196533], + [-2.20194673538208, 0.8459939956665039, -0.37988412380218506, 0.4510805904865265, -0.3953653573989868], + [1.7161552906036377, 0.15392881631851196, -1.4651801586151123, -0.5170696377754211, -0.35224831104278564], + [1.9751471281051636, 0.6788665652275085, 0.33716168999671936, -0.5290454030036926, 0.9324967265129089], + [0.6762214303016663, 0.14378660917282104, -0.5371747016906738, -1.025568962097168, 0.21775013208389282] + ], + [ + [2.395190954208374, 0.4023849368095398, 1.6874338388442993, -2.743908405303955, -1.9040486812591553], + [0.6755470037460327, 0.5524077415466309, 2.8273682594299316, 0.47407886385917664, -0.9808987379074097], + [-0.7519583702087402, -1.0324863195419312, -1.8080289363861084, 0.8105757236480713, -0.8080717325210571], + [0.5223215222358704, -0.9890871644020081, 1.2915894985198975, -1.6977379322052002, -1.2025177478790283], + [-1.646543025970459, -0.5333926677703857, -0.44184601306915283, -0.9252714514732361, 1.1864782571792603] + ] + ] + ], + "weight": [ + [ + [ + [0.0028998295310884714, -0.025963637977838516], + [0.25798147916793823, -0.31950950622558594] + ], + [ + [0.0315568633377552, -0.11244720220565796], + [0.029483363032341003, 0.33999162912368774] + ] + ], + [ + [ + [0.010343004949390888, -0.06131957471370697], + [-0.1198321282863617, -0.3528243601322174] + ], + [ + [-0.32032299041748047, 0.1540498435497284], + [-0.2159033864736557, 0.20072883367538452] + ] + ] + ], + "expected_output": [ + [ + [ + [0.18895334005355835, 0.8091791868209839, 
-0.33649635314941406], + [0.5632208585739136, -0.3073433041572571, -0.37303999066352844], + [0.369911789894104, -0.07539505511522293, 0.22091777622699738] + ], + [ + [-0.32488200068473816, 0.092551589012146, -0.33390378952026367], + [0.010525286197662354, -0.14475469291210175, -1.9261127710342407], + [0.4452356696128845, 0.762944757938385, 0.7822149991989136] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [0.013242834247648716, 0.013242834247648716, -0.07404037564992905, -0.08728320896625519, -0.08728320896625519], + [0.013242834247648716, 0.013242834247648716, -0.07404037564992905, -0.08728320896625519, -0.08728320896625519], + [0.15139219164848328, 0.15139219164848328, -0.6082248687744141, -0.759617030620575, -0.759617030620575], + [0.13814935088157654, 0.13814935088157654, -0.534184455871582, -0.672333836555481, -0.672333836555481], + [0.13814935088157654, 0.13814935088157654, -0.534184455871582, -0.672333836555481, -0.672333836555481] + ], + [ + [-0.28876611590385437, -0.28876611590385437, -0.24716347455978394, 0.041602641344070435, 0.041602641344070435], + [-0.28876611590385437, -0.28876611590385437, -0.24716347455978394, 0.041602641344070435, 0.041602641344070435], + [-0.47518613934516907, -0.47518613934516907, 0.10713696479797363, 0.5823230743408203, 0.5823230743408203], + [-0.1864200234413147, -0.1864200234413147, 0.35430043935775757, 0.5407204627990723, 0.5407204627990723], + [-0.1864200234413147, -0.1864200234413147, 0.35430043935775757, 0.5407204627990723, 0.5407204627990723] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [-0.07582569122314453, -4.699621200561523], + [3.67891263961792, -2.9388787746429443] + ], + [ + [4.947858810424805, -2.445499897003174], + [-5.389431476593018, -3.5948305130004883] + ] + ], + [ + [ + [-0.07582569122314453, -4.699621200561523], + [3.67891263961792, -2.9388787746429443] + ], + [ + [4.947858810424805, -2.445499897003174], + [-5.389431476593018, -3.5948305130004883] + ] + ] + ], + "bias": null, + 
"expected_grad_bias": null + }, + { + "test_name": "conv2d_groups", + "conv_type": "Conv2d", + "in_channels": 4, + "out_channels": 4, + "kernel_size": 3, + "stride": 1, + "padding": 1, + "dilation": 1, + "groups": 2, + "has_bias": true, + "input": [ + [ + [ + [-1.751257061958313, 0.9016333222389221, -1.3840047121047974, -0.8077333569526672], + [1.2065165042877197, 0.1894015073776245, -0.6532776355743408, -0.2359209805727005], + [0.6057451367378235, -0.3558703064918518, 1.4644410610198975, -0.8236719965934753], + [-0.3576895296573639, -0.9380688667297363, 1.4365062713623047, 1.5855119228363037] + ], + [ + [-1.2674946784973145, -0.382905513048172, 0.8315308690071106, 1.8437397480010986], + [0.021685972809791565, 0.28762421011924744, 0.39912644028663635, 2.4587013721466064], + [0.4567311406135559, 1.106606125831604, 0.9169553518295288, -0.23127765953540802], + [-1.498667597770691, 0.6427391171455383, 0.18222902715206146, -1.0757185220718384] + ], + [ + [1.5721423625946045, 1.0094727277755737, 0.4029425382614136, 1.6145533323287964], + [0.4475519359111786, 0.5499113202095032, 0.21999366581439972, -0.019641423597931862], + [0.20199595391750336, -0.305207222700119, -0.11879883706569672, 0.8017854690551758], + [-0.3870505392551422, 1.560797929763794, 0.04964430257678032, -0.33324524760246277] + ], + [ + [0.9817765355110168, 1.0983757972717285, -0.25671127438545227, -2.2621774673461914], + [0.16864269971847534, -1.6143133640289307, -0.011093960143625736, 1.2098065614700317], + [-1.4476906061172485, 1.3612878322601318, -1.3062708377838135, 1.6407432556152344], + [-0.33931660652160645, 0.13347899913787842, -0.9141297936439514, -0.15479597449302673] + ] + ] + ], + "weight": [ + [ + [ + [-0.2118092179298401, 0.14405177533626556, 0.09775334596633911], + [0.1980074644088745, 0.09006017446517944, 0.0047105285339057446], + [-0.17013078927993774, -0.22346627712249756, 0.0473034605383873] + ], + [ + [0.19228684902191162, -0.01832331344485283, -0.07689450681209564], + 
[0.11791721731424332, -0.11411400139331818, 0.08317071199417114], + [-0.1912676841020584, -0.213609978556633, -0.1375083029270172] + ] + ], + [ + [ + [-0.040356844663619995, 0.07947856187820435, 0.11925877630710602], + [-0.14918765425682068, -0.048408158123493195, 0.07760867476463318], + [-0.05197979137301445, 0.0035420211497694254, 0.15259502828121185] + ], + [ + [-0.06799621134996414, 0.12151914089918137, 0.191517636179924], + [0.07857467979192734, -0.041881661862134933, -0.14924223721027374], + [-0.15578678250312805, -0.04579702019691467, 0.07366083562374115] + ] + ], + [ + [ + [0.12890973687171936, -0.011370348744094372, 0.18993960320949554], + [-0.09335515648126602, 0.047002024948596954, -0.12067266553640366], + [0.06333556026220322, 0.20116466283798218, 0.08258989453315735] + ], + [ + [0.01579270511865616, -0.0867452397942543, -0.19338275492191315], + [-0.16129334270954132, -0.20218341052532196, 0.16752423346042633], + [-0.09832927584648132, -0.03526713326573372, -0.2112143188714981] + ] + ], + [ + [ + [-0.06572315096855164, 0.200480118393898, 0.04333389922976494], + [-0.04208781570196152, 0.12285008281469345, 0.18171538412570953], + [0.18341569602489471, -0.17439529299736023, 0.016922689974308014] + ], + [ + [-0.19355164468288422, -0.16251049935817719, 0.1809920221567154], + [-0.18778851628303528, -0.11559239774942398, 0.11303582787513733], + [0.22933201491832733, -0.22896496951580048, -0.1520940065383911] + ] + ] + ], + "expected_output": [ + [ + [ + [-0.437945157289505, -0.8000756502151489, -0.4048003554344177, -0.9892394542694092], + [-0.4748215973377228, -0.1758265495300293, -1.3033438920974731, -0.3647018373012543], + [0.43747565150260925, 0.14501184225082397, -0.42697831988334656, -0.009811826050281525], + [0.05442729592323303, -0.5152503252029419, 0.24377942085266113, 0.2307688295841217] + ], + [ + [0.5009375214576721, -0.06373965740203857, -0.21782654523849487, 0.27462610602378845], + [-0.1555309295654297, 0.18543344736099243, -0.28936246037483215, 
0.16772562265396118], + [0.08913391828536987, 0.5260958075523376, 0.7041329741477966, 0.3167964816093445], + [0.3670835494995117, 0.6219228506088257, 0.603873610496521, -0.2641192674636841] + ], + [ + [0.5454115867614746, -0.23418590426445007, -0.6559411287307739, 0.6428182125091553], + [-0.5572447776794434, 0.9416273832321167, 1.0619807243347168, 0.32319754362106323], + [1.1361644268035889, 0.6113019585609436, 0.322215735912323, 0.02001272141933441], + [-0.17614921927452087, 0.23407401144504547, 0.10337664186954498, 0.10841583460569382] + ], + [ + [0.6723560094833374, 0.3375539183616638, -0.4744628369808197, 0.4022575616836548], + [0.5820930600166321, -0.2659001350402832, 0.43256571888923645, -0.12483265995979309], + [0.3827870488166809, 0.052575767040252686, 1.4474822282791138, -0.013742789626121521], + [0.946242094039917, 0.0519019216299057, 0.36664947867393494, 0.44821277260780334] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [0.06183610111474991, 0.36116743087768555, 0.36116743087768555, 0.5645136833190918], + [-0.38019871711730957, 0.11903107166290283, 0.11903107166290283, 0.5444878935813904], + [-0.38019871711730957, 0.11903107166290283, 0.11903107166290283, 0.5444878935813904], + [-0.351563036441803, -0.06934532523155212, -0.06934532523155212, 0.10394544154405594] + ], + [ + [0.26798269152641296, 0.3165343403816223, 0.3165343403816223, -0.004248231649398804], + [-0.33847877383232117, -0.3537746071815491, -0.3537746071815491, -0.32750266790390015], + [-0.33847877383232117, -0.3537746071815491, -0.3537746071815491, -0.32750266790390015], + [-0.5659652352333069, -0.6958842277526855, -0.6958842277526855, -0.5453216433525085] + ], + [ + [0.28670549392700195, 0.5810217261314392, 0.5810217261314392, 0.653278112411499], + [0.5602260828018188, 0.9540549516677856, 0.9540549516677856, 0.7795600891113281], + [0.5602260828018188, 0.9540549516677856, 0.9540549516677856, 0.7795600891113281], + [0.30792975425720215, 0.4684850573539734, 0.4684850573539734, 
0.3571767807006836] + ], + [ + [-1.0938724279403687, -0.8257030844688416, -0.8257030844688416, -0.2988622188568115], + [-1.2271018028259277, -1.3222408294677734, -1.3222408294677734, -0.9264026284217834], + [-1.2271018028259277, -1.3222408294677734, -1.3222408294677734, -0.9264026284217834], + [-0.8000870943069458, -0.882835328578949, -0.882835328578949, -0.664756178855896] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [0.22332781553268433, -1.643998622894287, -1.705003261566162], + [0.3640757203102112, 0.08226120471954346, 0.37894606590270996], + [2.5977039337158203, 3.1236228942871094, 1.669050931930542] + ], + [ + [2.3698599338531494, 6.441023826599121, 7.230101108551025], + [1.6961604356765747, 4.691606044769287, 6.979351043701172], + [2.5150299072265625, 3.6667346954345703, 4.686985015869141] + ] + ], + [ + [ + [0.22332781553268433, -1.643998622894287, -1.705003261566162], + [0.3640757203102112, 0.08226120471954346, 0.37894606590270996], + [2.5977039337158203, 3.1236228942871094, 1.669050931930542] + ], + [ + [2.3698599338531494, 6.441023826599121, 7.230101108551025], + [1.6961604356765747, 4.691606044769287, 6.979351043701172], + [2.5150299072265625, 3.6667346954345703, 4.686985015869141] + ] + ], + [ + [ + [3.98000431060791, 6.376701354980469, 4.1550116539001465], + [5.203395843505859, 7.266847610473633, 5.432208061218262], + [2.2188384532928467, 2.6677372455596924, 2.4052398204803467] + ], + [ + [-1.0259971618652344, -0.43762481212615967, -0.14035344123840332], + [-2.1459646224975586, -1.7123881578445435, -1.0758002996444702], + [-3.9694056510925293, -1.2736515998840332, 0.3447127342224121] + ] + ], + [ + [ + [3.98000431060791, 6.376701354980469, 4.1550116539001465], + [5.203395843505859, 7.266847610473633, 5.432208061218262], + [2.2188384532928467, 2.6677372455596924, 2.4052398204803467] + ], + [ + [-1.0259971618652344, -0.43762481212615967, -0.14035344123840332], + [-2.1459646224975586, -1.7123881578445435, -1.0758002996444702], + 
[-3.9694056510925293, -1.2736515998840332, 0.3447127342224121] + ] + ] + ], + "bias": [-0.09242674708366394, 0.1825886368751526, 0.13736048340797424, 0.1469424068927765], + "expected_grad_bias": [16.0, 16.0, 16.0, 16.0] + }, + { + "test_name": "conv3d_basic", + "conv_type": "Conv3d", + "in_channels": 1, + "out_channels": 1, + "kernel_size": 3, + "stride": 1, + "padding": 0, + "dilation": 1, + "groups": 1, + "has_bias": true, + "input": [ + [ + [ + [ + [-0.20922881364822388, 1.8449939489364624, 0.07891727983951569, 0.506382942199707, 0.7588040828704834], + [-1.5863133668899536, -0.6730678081512451, -1.2264569997787476, -0.2706870436668396, -0.18152348697185516], + [-0.1410752534866333, -0.5513549447059631, -0.3231433033943176, 2.119018793106079, 0.7877871990203857], + [-1.9625552892684937, -0.24212808907032013, -0.5964023470878601, 0.4040224254131317, 0.479513943195343], + [-0.699771523475647, -1.096972107887268, -0.55166095495224, -0.3263216018676758, 2.3630404472351074] + ], + [ + [-0.6355212926864624, 0.24983063340187073, -0.024390900507569313, 0.3701395094394684, -0.7284356355667114], + [-0.9289583563804626, 0.8983376026153564, 1.5237942934036255, 0.9267290830612183, 0.6964794397354126], + [-0.18853561580181122, -1.1001535654067993, 0.8703535795211792, 0.9143402576446533, -0.9333758354187012], + [-1.4757920503616333, -0.716029942035675, -1.0355678796768188, 0.11436617374420166, -0.663499116897583], + [1.2121490240097046, 0.9221742749214172, 0.5710628032684326, 0.9190285205841064, 0.06706561148166656] + ], + [ + [-0.20243076980113983, 1.5941754579544067, 0.2908516526222229, -0.08251824229955673, 1.5195056200027466], + [-0.3584267199039459, -0.8824630379676819, -0.681059718132019, 0.3627133369445801, -0.24194136261940002], + [1.4872429370880127, 0.6041548848152161, -1.1257244348526, 0.3761575222015381, -0.8384974598884583], + [-1.2241265773773193, -1.0345147848129272, 0.9935551285743713, 0.4042589068412781, -1.5217982530593872], + [-0.5133728981018066, 
-1.3806458711624146, -0.06873630732297897, -0.4733000695705414, -0.061788320541381836] + ], + [ + [-1.545185923576355, -2.14782977104187, 0.4973359704017639, -1.268904685974121, -0.24232769012451172], + [0.9422440528869629, 1.0547202825546265, -0.30717894434928894, 0.3090994954109192, 0.6613953113555908], + [-0.03297487273812294, -0.13994985818862915, 1.0257868766784668, -1.170671820640564, 1.9273324012756348], + [-0.5038859844207764, -1.263846516609192, 0.7555463910102844, -0.14636565744876862, -0.09429822862148285], + [0.29657915234565735, -0.31488749384880066, 0.16009655594825745, 2.293909788131714, -1.0465677976608276] + ], + [ + [-0.5004768371582031, -0.8111429214477539, 2.0269458293914795, -2.568801164627075, -1.117609977722168], + [0.19109269976615906, -0.5215631127357483, -1.2473080158233643, 1.4498697519302368, -0.0976748913526535], + [1.1017417907714844, 0.7865778803825378, -1.245198130607605, 1.9315311908721924, 1.5571229596389458e-05], + [-0.09776033461093903, 2.587069511413574, 0.4173567295074463, -0.39924731850624084, -0.03817286342382431], + [1.2513736486434937, 1.3171806335449219, -0.26118963956832886, -0.6803540587425232, -1.1289931535720825] + ] + ] + ] + ], + "weight": [ + [ + [ + [ + [-0.06871431320905685, -0.1895723193883896, 0.18153363466262817], + [-0.10595516115427017, 0.043980490416288376, 0.11969207227230072], + [-0.02988642454147339, 0.15040633082389832, 0.124718077480793] + ], + [ + [-0.03874564543366432, 0.05045807361602783, 0.014584562741219997], + [0.18827179074287415, 0.1332388073205948, -0.12817978858947754], + [0.018542924895882607, 0.10379975289106369, 0.1471288502216339] + ], + [ + [0.06570863723754883, 0.08177531510591507, -0.03553275391459465], + [0.004174221772700548, -0.13319559395313263, 0.15843281149864197], + [0.1515427529811859, -0.1059718206524849, -0.16476500034332275] + ] + ] + ] + ], + "expected_output": [ + [ + [ + [ + [-0.03989933058619499, 1.1945534944534302, 0.809730052947998], + [-0.9439581632614136, 
0.14555580914020538, 1.0754095315933228], + [0.6019061803817749, 0.35251688957214355, 0.07972578704357147] + ], + [ + [-0.30882903933525085, 0.2652077078819275, -0.2784307301044464], + [0.956753671169281, -0.36098459362983704, 0.21379412710666656], + [0.28985854983329773, -0.3031429648399353, 0.20372016727924347] + ], + [ + [0.18025630712509155, 0.6066932082176208, -0.18516960740089417], + [-0.8569323420524597, 1.5108901262283325, -0.337395042181015], + [-0.628158450126648, 0.9462756514549255, 0.15218240022659302] + ] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [ + [-0.06871431320905685, -0.25828662514686584, -0.07675299048423767, -0.008038684725761414, 0.18153363466262817], + [-0.17466947436332703, -0.32026129961013794, -0.01903557777404785, 0.15563388168811798, 0.3012257218360901], + [-0.20455589890480042, -0.199741393327713, 0.22620239853858948, 0.4307582974433899, 0.4259437918663025], + [-0.13584157824516296, 0.05854523181915283, 0.30295538902282715, 0.4387969672679901, 0.24441015720367432], + [-0.02988642454147339, 0.12051990628242493, 0.24523797631263733, 0.2751244008541107, 0.124718077480793] + ], + [ + [-0.10745996236801147, -0.24657419323921204, -0.05045599490404129, 0.05700395256280899, 0.19611819088459015], + [-0.025143325328826904, 0.012961730360984802, 0.20059221982955933, 0.22573554515838623, 0.18763050436973572], + [-0.036486826837062836, 0.25582432746887207, 0.7153017520904541, 0.7517886161804199, 0.45947742462158203], + [0.07097313553094864, 0.5023984909057617, 0.7657577991485596, 0.6947846412658691, 0.2633592188358307], + [-0.011343499645590782, 0.24286258220672607, 0.51470947265625, 0.5260530114173889, 0.2718469202518463] + ], + [ + [-0.041751325130462646, -0.09909024089574814, 0.06149520352482796, 0.10324651002883911, 0.1605854332447052], + [0.044739533215761185, 0.03142431378364563, 0.34195488691329956, 0.2972153425216675, 0.31053054332733154], + [0.18493878841400146, 0.3198578655719757, 0.7374703884124756, 0.5525315999984741, 
0.4176124930381775], + [0.2266901135444641, 0.41894805431365967, 0.6759752035140991, 0.44928503036499023, 0.2570270299911499], + [0.14019925892353058, 0.2884335219860077, 0.39551544189453125, 0.25531619787216187, 0.10708191990852356] + ], + [ + [0.026962991803884506, 0.1591963768005371, 0.13824819028377533, 0.11128519475460052, -0.020948190242052078], + [0.2194090038537979, 0.35168564319610596, 0.3609904646873474, 0.14158141613006592, 0.009304836392402649], + [0.3894946873188019, 0.519599199295044, 0.511267900466919, 0.12177324295043945, -0.008331313729286194], + [0.3625316917896271, 0.3604028522968292, 0.3730197548866272, 0.010488033294677734, 0.012616872787475586], + [0.17008568346500397, 0.16791361570358276, 0.15027746558189392, -0.019808202981948853, -0.017636150121688843] + ], + [ + [0.06570863723754883, 0.1474839448928833, 0.11195118725299835, 0.04624256119132042, -0.03553275391459465], + [0.06988286226987839, 0.018462568521499634, 0.14136263728141785, 0.07147978246212006, 0.12290005385875702], + [0.2214256227016449, 0.06403350085020065, 0.02216857671737671, -0.1992570459842682, -0.041864946484565735], + [0.15571697056293488, -0.08345044404268265, -0.08978263288736343, -0.2454996109008789, -0.006332188844680786], + [0.1515427529811859, 0.04557093232870102, -0.11919406801462173, -0.27073681354522705, -0.16476500034332275] + ] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [ + [-1.3956527709960938, 6.589871406555176, 5.444220066070557], + [-11.676412582397461, 0.05304795503616333, 1.3334128856658936], + [-9.367570877075195, -1.4101629257202148, 2.863755702972412] + ], + [ + [0.7380445003509521, 2.9376754760742188, 4.626987934112549], + [-2.8434526920318604, 1.5303876399993896, 3.101929187774658], + [-3.2200429439544678, 0.9543972015380859, 2.2126691341400146] + ], + [ + [-0.1460428237915039, -1.890393614768982, 0.14312434196472168], + [1.2811082601547241, 2.8933072090148926, 1.459482192993164], + [3.5774481296539307, 3.94854998588562, -0.015356063842773438] 
+ ] + ] + ] + ], + "bias": [0.1500702053308487], + "expected_grad_bias": [27.0] + }, + { + "test_name": "conv3d_stride_padding", + "conv_type": "Conv3d", + "in_channels": 2, + "out_channels": 3, + "kernel_size": 2, + "stride": 2, + "padding": 1, + "dilation": 1, + "groups": 1, + "has_bias": true, + "input": [ + [ + [ + [ + [-0.2560498118400574, 0.4420258402824402, 0.0863056629896164, -1.059638500213623], + [0.6893559694290161, 0.033526014536619186, -0.2977294325828552, -1.1700760126113892], + [-2.578432559967041, -0.7338599562644958, 0.41111159324645996, 0.9563807249069214], + [-0.14632517099380493, 0.7788861393928528, 0.6403340101242065, 0.8075235486030579] + ], + [ + [0.2157520353794098, -0.1576623022556305, 1.7260406017303467, 0.620775580406189], + [-1.160732388496399, -1.486362099647522, 0.13496126234531403, 1.354621410369873], + [-0.26716873049736023, -0.5046500563621521, -1.6846002340316772, -1.187042474746704], + [0.6727956533432007, -0.4322131276130676, 1.8005059957504272, 1.3338027000427246] + ], + [ + [-2.6210720539093018, 0.3556872010231018, -0.5336131453514099, 0.055769819766283035], + [-0.17826782166957855, -0.2625838816165924, 0.7513576745986938, 0.18278615176677704], + [-0.6692651510238647, 0.38295796513557434, -0.3610095679759979, -0.1767248809337616], + [0.46591266989707947, 1.3539252281188965, -1.459295630455017, 0.37469375133514404] + ], + [ + [0.6077072620391846, -0.323801726102829, 1.5925687551498413, 0.959291398525238], + [-0.4339471161365509, 0.9574882388114929, 1.254128336906433, -1.0384563207626343], + [0.8031599521636963, 0.2645536959171295, -1.123774766921997, -1.1109771728515625], + [0.7272883057594299, -1.7194626331329346, 2.383774995803833, -1.1649107933044434] + ] + ], + [ + [ + [-2.3457014560699463, -1.1401145458221436, -0.35688871145248413, -0.615592360496521], + [-0.8155969977378845, -0.1509992778301239, -0.5020706653594971, 0.8036778569221497], + [0.38699206709861755, 0.6978518962860107, 0.2989071011543274, 1.3116708993911743], + 
[-1.5332636833190918, -1.9342981576919556, 0.3510490953922272, -0.6404718160629272] + ], + [ + [-0.15049251914024353, 1.386378526687622, -0.28399038314819336, -0.4822995364665985], + [0.9396787285804749, 1.0826958417892456, -0.6519155502319336, 0.2863307297229767], + [1.1631540060043335, 0.15193448960781097, 1.3717601299285889, -0.7534017562866211], + [-0.6460452675819397, 0.04811352491378784, 1.382570505142212, -0.6665758490562439] + ], + [ + [0.7093656659126282, -0.43539032340049744, 0.514474093914032, 0.09726667404174805], + [1.1576530933380127, -0.21968594193458557, 0.4215473532676697, -0.32665783166885376], + [-0.1360998898744583, 1.856263518333435, -1.09726881980896, 0.2263745665550232], + [0.6454427242279053, -0.6314883828163147, -1.2809436321258545, 0.009232764132320881] + ], + [ + [0.3166883885860443, 0.545379102230072, -0.5113165974617004, 1.4299159049987793], + [0.7535606026649475, -0.38888975977897644, 2.5921998023986816, 0.08543383330106735], + [-0.24551886320114136, -0.7772120833396912, -0.268094003200531, 0.08797460794448853], + [1.082465648651123, -1.8998514413833618, -2.7074427604675293, -0.7867730259895325] + ] + ] + ], + [ + [ + [ + [0.09093067049980164, 0.5856727361679077, 0.33771756291389465, 0.07148759812116623], + [-0.7571792006492615, -1.0642281770706177, 1.9611531496047974, -0.4390997588634491], + [-1.2553943395614624, -0.3051481544971466, 0.10523247718811035, 0.6907721161842346], + [0.9099079370498657, -0.7323962450027466, -0.6663155555725098, -0.3277064561843872] + ], + [ + [1.6730942726135254, 2.073265552520752, -1.2809516191482544, -0.035444121807813644], + [-0.5971476435661316, -1.0348221063613892, 0.42101818323135376, -0.548947811126709], + [-0.9191765189170837, -0.632600724697113, 1.6793338060379028, 0.2978390157222748], + [-0.1553155928850174, -0.4344737231731415, -1.0962450504302979, -1.122981309890747] + ], + [ + [0.9182227849960327, -1.2793315649032593, 1.3690991401672363, -0.22597739100456238], + [-1.0449823141098022, 
-0.27992430329322815, 1.2101659774780273, -0.9885204434394836], + [1.1357662677764893, 0.738558292388916, -1.4970489740371704, 1.7736228704452515], + [0.3266002833843231, -0.49929699301719666, -1.4715039730072021, 0.5023646950721741] + ], + [ + [0.0795271098613739, -0.8189097046852112, 0.9071961045265198, -1.26920485496521], + [-0.5910199284553528, 2.3618080615997314, 1.2076122760772705, -0.5487976670265198], + [-0.54526287317276, -0.13092944025993347, -0.061628274619579315, 0.08023218810558319], + [0.02449978142976761, 0.25982725620269775, 0.7348682284355164, -0.006029692944139242] + ] + ], + [ + [ + [-1.2950924634933472, -0.5916725397109985, -0.009037514217197895, -0.07328195124864578], + [-0.48808231949806213, -0.6568363904953003, -0.4720822274684906, 0.35971710085868835], + [1.8668620586395264, 0.2708132565021515, -1.3498177528381348, -0.5766077637672424], + [0.020138248801231384, 0.573236346244812, 0.37203314900398254, 0.22609540820121765] + ], + [ + [0.31892329454421997, -2.3837339878082275, 0.005561305209994316, 0.21266300976276398], + [0.1943359375, 1.0423648357391357, -1.1257092952728271, -0.04083564877510071], + [-1.9132182598114014, -0.5003356337547302, 0.28680968284606934, 0.02185521274805069], + [-0.16209769248962402, -0.026765741407871246, -0.33800795674324036, 0.6160297393798828] + ], + [ + [-1.0447378158569336, -0.4145452380180359, -0.29702383279800415, 2.4519524574279785], + [0.18906556069850922, 1.0375696420669556, 0.7793638706207275, -0.6498270630836487], + [1.5214240550994873, 1.7304459810256958, 1.014835000038147, -0.9632375240325928], + [-0.4939593970775604, 0.30456098914146423, 2.1306238174438477, 0.026654772460460663] + ], + [ + [-0.8898360729217529, -0.25780898332595825, -0.0801180824637413, 1.581257939338684], + [-1.3104760646820068, -1.4218922853469849, -0.3201002776622772, 0.011662798002362251], + [0.17730486392974854, -0.6651375889778137, 0.10343444347381592, -1.2494463920593262], + [-1.1447242498397827, 0.22346419095993042, 
-0.9369489550590515, -0.17544429004192352] + ] + ] + ] + ], + "weight": [ + [ + [ + [ + [0.03610756993293762, 0.13593754172325134], + [-0.214359849691391, 0.03445395827293396] + ], + [ + [0.05883944034576416, 0.1566893756389618], + [-0.13382488489151, -0.02987608313560486] + ] + ], + [ + [ + [-0.14901646971702576, -0.16825300455093384], + [0.10407266020774841, 0.03812253475189209] + ], + [ + [0.09933644533157349, -0.005692780017852783], + [-0.006141453981399536, -0.009599179029464722] + ] + ] + ], + [ + [ + [ + [0.13504061102867126, -0.17298537492752075], + [0.053891122341156006, 0.13708344101905823] + ], + [ + [0.14499762654304504, -0.11996981501579285], + [0.19359561800956726, -0.16661545634269714] + ] + ], + [ + [ + [0.13035959005355835, 0.2045004963874817], + [0.00802159309387207, 0.0765899121761322] + ], + [ + [-0.2368302345275879, -0.09888988733291626], + [-0.09822297096252441, -0.23275858163833618] + ] + ] + ], + [ + [ + [ + [-0.05294084548950195, 0.1745298206806183], + [-0.023869633674621582, 0.24704435467720032] + ], + [ + [-0.22504374384880066, 0.09970375895500183], + [0.21617761254310608, 0.2429896891117096] + ] + ], + [ + [ + [-0.09697866439819336, -0.2257283329963684], + [-0.09188583493232727, 0.18532708287239075] + ], + [ + [0.021904200315475464, 0.23895001411437988], + [-0.1825445592403412, -0.1565455198287964] + ] + ] + ] + ], + "expected_output": [ + [ + [ + [ + [0.15757492184638977, 0.07610362023115158, 0.2729949951171875], + [0.3133848011493683, 0.14935964345932007, 0.0023531264159828424], + [0.11320929229259491, 0.0794263631105423, 0.11130039393901825] + ], + [ + [0.20060275495052338, 0.3202092945575714, -0.06391594558954239], + [-0.1665666252374649, 0.19540159404277802, 0.31026408076286316], + [0.39689481258392334, -0.08766239881515503, 0.2978634238243103] + ], + [ + [0.16041924059391022, 0.28895512223243713, 0.07058995217084885], + [-0.0400579459965229, -0.23226553201675415, 0.3244858384132385], + [0.044146034866571426, 1.1280118227005005, 
0.20258839428424835] + ] + ], + [ + [ + [0.6743638515472412, 0.35196858644485474, -0.05895623937249184], + [0.42320317029953003, -0.1369774341583252, -0.2179587334394455], + [0.25489869713783264, 0.5452207922935486, 0.3544919192790985] + ], + [ + [0.3753697872161865, 0.38398972153663635, 0.11654826998710632], + [0.5812307000160217, -0.1928812712430954, 0.2833777964115143], + [-0.28250378370285034, 0.6525160074234009, 0.2310858964920044] + ], + [ + [0.19328156113624573, 0.25179770588874817, 0.14888730645179749], + [0.406185507774353, 0.3109228312969208, -0.102542944252491], + [0.18127433955669403, -1.360172986984253, -0.17415384948253632] + ] + ], + [ + [ + [0.14257128536701202, 0.2180982083082199, -0.2791174054145813], + [-0.9756902456283569, -0.5558570027351379, 0.08581161499023438], + [-0.5433828830718994, -0.23234613239765167, -0.35817745327949524] + ], + [ + [-0.8849518299102783, 0.03390062600374222, -0.13862080872058868], + [-0.3100248873233795, -0.12397056072950363, -0.2921593189239502], + [0.3015151023864746, -0.9121512770652771, -0.25250956416130066] + ], + [ + [0.04640128090977669, 0.09387040883302689, -0.3167072534561157], + [-0.25534218549728394, -0.8038539886474609, -0.09729380905628204], + [-0.2798299789428711, 1.1400408744812012, -0.02444874495267868] + ] + ] + ], + [ + [ + [ + [0.13712352514266968, 0.042661577463150024, 0.11829159408807755], + [0.03113088570535183, 0.35850775241851807, 0.048403702676296234], + [0.26986661553382874, 0.03473536670207977, 0.13058580458164215] + ], + [ + [0.17980697751045227, -0.47331976890563965, 0.17232142388820648], + [-0.3044206202030182, 0.5309773087501526, -0.3020525276660919], + [0.18755534291267395, -0.21826347708702087, 0.0272683035582304] + ], + [ + [0.09622557461261749, 0.3043210804462433, 0.5640406608581543], + [0.2555309534072876, 0.6032538414001465, -0.04137711971998215], + [0.32334208488464355, 0.3610308766365051, 0.15333472192287445] + ] + ], + [ + [ + [0.37201324105262756, 0.20305393636226654, 
0.10675747692584991], + [-0.0005351829458959401, 0.10934583097696304, 0.1272260993719101], + [-0.0254331324249506, -0.11308815330266953, -0.01534306351095438] + ], + [ + [0.4296812415122986, -0.362775593996048, -0.19907069206237793], + [-0.4804687201976776, -0.5294472575187683, 0.4710357189178467], + [0.08910350501537323, -0.03461659327149391, 0.08090616017580032] + ], + [ + [0.028469210490584373, 0.1577451527118683, 0.030005156993865967], + [-0.14120210707187653, -0.06797496229410172, 0.00743146101012826], + [-0.15261496603488922, -0.16879023611545563, 0.0620347261428833] + ] + ], + [ + [ + [0.06241583824157715, 0.15567229688167572, -0.13358905911445618], + [-0.9518380165100098, 0.266897052526474, 0.1988617479801178], + [-0.0668870285153389, 0.03742033243179321, -0.08371957391500473] + ], + [ + [0.696681022644043, -0.1300133317708969, -0.6775568127632141], + [-0.9133601188659668, 0.3617091476917267, 0.6289619207382202], + [-0.23840579390525818, 0.22958028316497803, -0.2751806378364563] + ], + [ + [-0.3076843023300171, 0.09008538722991943, -0.2774200439453125], + [-0.07160405069589615, 0.20164307951927185, -0.02160622552037239], + [0.10025234520435333, 0.1419053077697754, -0.14508673548698425] + ] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [ + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455], + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455] + ], + [ + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693], + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693] + ], + [ + 
[0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455], + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455] + ], + [ + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693], + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693] + ] + ], + [ + [ + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894], + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894] + ], + [ + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077], + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077] + ], + [ + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894], + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894] + ], + [ + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, 
-0.18948084115982056, -0.11563554406166077], + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077] + ] + ] + ], + [ + [ + [ + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455], + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455] + ], + [ + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693], + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693] + ], + [ + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455], + [0.04649814963340759, 0.27594834566116333, 0.04649814963340759, 0.27594834566116333], + [0.13642331957817078, -0.021206676959991455, 0.13642331957817078, -0.021206676959991455] + ], + [ + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693], + [0.4185817539691925, -0.18433836102485657, 0.4185817539691925, -0.18433836102485657], + [0.13748198747634888, 0.11820733547210693, 0.13748198747634888, 0.11820733547210693] + ] + ], + [ + [ + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894], + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + 
[0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894] + ], + [ + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077], + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077] + ], + [ + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894], + [-0.3989032804965973, -0.28690898418426514, -0.3989032804965973, -0.28690898418426514], + [0.13436734676361084, -0.11558958888053894, 0.13436734676361084, -0.11558958888053894] + ], + [ + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077], + [0.30003952980041504, 0.020208418369293213, 0.30003952980041504, 0.020208418369293213], + [-0.18948084115982056, -0.11563554406166077, -0.18948084115982056, -0.11563554406166077] + ] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [ + [-3.269909620285034, 5.327045440673828], + [-1.8752652406692505, 3.4018166065216064] + ], + [ + [-1.7301268577575684, 0.9331886768341064], + [2.272254467010498, -5.31749963760376] + ] + ], + [ + [ + [-2.010932445526123, -2.3986570835113525], + [-1.6520172357559204, -0.5989487171173096] + ], + [ + [-1.8695197105407715, 0.48091793060302734], + [3.8321967124938965, -1.6188081502914429] + ] + ] + ], + [ + [ + [ + [-3.269909620285034, 5.327045440673828], + [-1.8752652406692505, 3.4018166065216064] + ], + [ + [-1.7301268577575684, 0.9331886768341064], + [2.272254467010498, -5.31749963760376] + ] + ], + [ + [ + [-2.010932445526123, -2.3986570835113525], + [-1.6520172357559204, -0.5989487171173096] + ], + [ + [-1.8695197105407715, 
0.48091793060302734], + [3.8321967124938965, -1.6188081502914429] + ] + ] + ], + [ + [ + [ + [-3.269909620285034, 5.327045440673828], + [-1.8752652406692505, 3.4018166065216064] + ], + [ + [-1.7301268577575684, 0.9331886768341064], + [2.272254467010498, -5.31749963760376] + ] + ], + [ + [ + [-2.010932445526123, -2.3986570835113525], + [-1.6520172357559204, -0.5989487171173096] + ], + [ + [-1.8695197105407715, 0.48091793060302734], + [3.8321967124938965, -1.6188081502914429] + ] + ] + ] + ], + "bias": [0.12740835547447205, 0.08571982383728027, -0.16242030262947083], + "expected_grad_bias": [54.0, 54.0, 54.0] + }, + { + "test_name": "conv3d_dilation_no_bias", + "conv_type": "Conv3d", + "in_channels": 2, + "out_channels": 2, + "kernel_size": 2, + "stride": 1, + "padding": 0, + "dilation": 2, + "groups": 1, + "has_bias": false, + "input": [ + [ + [ + [ + [-0.6203854084014893, 1.3424702882766724, -1.7992236614227295, -1.2398992776870728, -1.1074808835983276], + [-0.6043308973312378, 0.0144581263884902, -0.2186170518398285, -0.06249626725912094, -0.4906177818775177], + [-1.3847423791885376, 0.8051268458366394, 1.1163915395736694, 0.4140532910823822, 0.12140734493732452], + [0.836719274520874, -1.3327927589416504, 1.8258413076400757, -1.0219624042510986, -0.12498770654201508], + [-0.5020677447319031, 0.45930108428001404, -1.3068112134933472, 0.9302626848220825, -0.14492876827716827] + ], + [ + [-0.5604604482650757, 2.031674861907959, -1.5307694673538208, -0.2891634404659271, -2.518200635910034], + [-2.533468723297119, -0.022988365963101387, 2.116429328918457, 0.10532508790493011, -1.7034618854522705], + [0.3412906229496002, 0.20424777269363403, -0.8860291838645935, 1.0496968030929565, 0.5568186640739441], + [0.8174132108688354, 0.5786475539207458, -0.5710321664810181, -0.953288733959198, -0.709164023399353], + [-0.9840890765190125, -0.17089858651161194, 0.2695762813091278, 0.5619763135910034, 1.1325730085372925] + ], + [ + [-0.25796496868133545, 2.5571300983428955, 
-1.1872515678405762, -0.83109050989151, -1.1144185066223145], + [1.2617039680480957, -0.30662697553634644, -1.1087933778762817, 0.15701289474964142, -0.7622604370117188], + [-0.5229234099388123, -0.26092126965522766, -2.14499831199646, 1.0247118473052979, 0.4615347981452942], + [-1.7553198337554932, 0.5876975059509277, 1.5004618167877197, -0.45995032787323, 0.06401844322681427], + [-0.4596543312072754, -0.5983195900917053, -0.4856646656990051, 2.03764009475708, -0.1844426393508911] + ], + [ + [0.8303185701370239, -1.7404849529266357, -0.43988513946533203, 0.978778600692749, 1.003504991531372], + [-0.5394609570503235, 0.15667305886745453, 0.41149622201919556, 0.41322529315948486, -2.1648738384246826], + [0.9794597625732422, 0.38047143816947937, -1.9277540445327759, -0.3310789167881012, -1.4065375328063965], + [-1.425918698310852, 0.783023476600647, 0.0861901044845581, -0.37659966945648193, 0.9756653904914856], + [-0.8411983847618103, -0.8522480726242065, -1.0400315523147583, 0.9906719923019409, -0.5392524600028992] + ], + [ + [-0.25051894783973694, 2.1708250045776367, -1.229917049407959, 0.3085583448410034, 1.8373844623565674], + [0.7339112758636475, -0.015649020671844482, 1.4117823839187622, -0.34354400634765625, 0.34807446599006653], + [-1.7184555530548096, -0.8929248452186584, -0.5750245451927185, 0.43019720911979675, -1.0010207891464233], + [-2.3218765258789062, 1.2518025636672974, -0.35988521575927734, -0.43994462490081787, 1.9879144430160522], + [1.3556616306304932, -1.05279541015625, 0.7319883108139038, -0.12117598950862885, 0.979621946811676] + ] + ], + [ + [ + [0.9170622229576111, -0.11666277050971985, 0.2106197327375412, -0.8543375730514526, 1.5414035320281982], + [-1.7241556644439697, 1.9946883916854858, -0.01325446367263794, 1.1153861284255981, -0.4465695023536682], + [1.315346598625183, -0.5311602354049683, -0.866241455078125, 0.9168353080749512, -0.27498602867126465], + [-0.9239965677261353, -0.553339421749115, 0.09259668737649918, 1.0563279390335083, 
0.2714795768260956], + [-0.23392368853092194, -1.0699013471603394, -0.4570131301879883, -0.13208438456058502, -1.0929938554763794] + ], + [ + [-2.4860122203826904, -0.038849566131830215, -0.39319077134132385, 0.05676383897662163, 0.7779240608215332], + [0.17819836735725403, 0.8963930010795593, -1.2691730260849, 0.042280785739421844, 0.9680984020233154], + [1.3679786920547485, 0.1106303408741951, -0.5484081506729126, 0.07075770199298859, -0.04678618907928467], + [-0.6908267736434937, -0.38857290148735046, 0.04591083899140358, 1.2395371198654175, -0.7971398234367371], + [0.22626927495002747, -1.4946868419647217, -1.124684453010559, -0.17863909900188446, 0.20412735641002655] + ], + [ + [0.9273927807807922, 1.1264110803604126, 0.299227237701416, -0.7428660988807678, 2.108266830444336], + [0.9701831936836243, -1.7244060039520264, 1.61734139919281, -0.9298601746559143, 0.7922201156616211], + [-0.2129899561405182, -0.2013249695301056, -0.3676932752132416, 0.3004297614097595, -1.303833246231079], + [0.7512189745903015, 0.3729371726512909, 0.06509778648614883, 0.12552233040332794, 0.7588092088699341], + [2.877042770385742, 0.973744809627533, 1.072774887084961, -1.0264922380447388, -1.1302908658981323] + ], + [ + [0.35103702545166016, -0.27594780921936035, -0.9560491442680359, -1.2762675285339355, -0.7730143070220947], + [-0.2413354218006134, 2.3902435302734375, -0.5483986139297485, -0.13558585941791534, -1.0279778242111206], + [0.043768659234046936, 0.4503754675388336, -0.7236151099205017, 0.2793406844139099, -0.8662601113319397], + [0.8459339737892151, 0.8635647892951965, -0.28217804431915283, 2.1240313053131104, -0.13740085065364838], + [1.5633418560028076, -0.6294617652893066, 0.8042920231819153, 0.6676139235496521, 0.3088100254535675] + ], + [ + [-0.123696468770504, 0.7777372598648071, -1.1679390668869019, -0.8011578917503357, -1.122711181640625], + [-0.7824243903160095, -0.5122944712638855, -2.070183753967285, -0.9398608803749084, 0.2550857663154602], + 
[-0.4017896056175232, -1.2750170230865479, -0.7951065897941589, 1.8864071369171143, 0.1899632215499878], + [1.6621514558792114, 1.6152663230895996, -0.6503500938415527, 0.6934462189674377, -0.7531406283378601], + [-1.054457426071167, -0.34221795201301575, 0.5399923920631409, 0.43685466051101685, 0.5002582669258118] + ] + ] + ] + ], + "weight": [ + [ + [ + [ + [-0.18336549401283264, 0.07602518796920776], + [-0.22722312808036804, -0.052385568618774414] + ], + [ + [-0.08105394244194031, 0.1908881664276123], + [-0.24080249667167664, 0.08882123231887817] + ] + ], + [ + [ + [-0.2278047800064087, 0.10837799310684204], + [0.06122618913650513, -0.09837496280670166] + ], + [ + [0.07384264469146729, 0.2399427592754364], + [0.15220186114311218, -0.22811689972877502] + ] + ] + ], + [ + [ + [ + [-0.06478935480117798, 0.2267712950706482], + [0.03867802023887634, 0.030821144580841064] + ], + [ + [0.01783686876296997, -0.08789634704589844], + [-0.06539773941040039, 0.01659148931503296] + ] + ], + [ + [ + [0.18818816542625427, -0.07046207785606384], + [-0.06542328000068665, 0.04758992791175842] + ], + [ + [-0.091501384973526, 0.2180650234222412], + [0.18806594610214233, 0.010502755641937256] + ] + ] + ] + ], + "expected_output": [ + [ + [ + [ + [0.13421392440795898, -1.1400971412658691, 1.2892173528671265], + [0.935309648513794, -0.5722033977508545, -0.7368906736373901], + [-0.05186110734939575, 0.8800054788589478, 0.7997747659683228] + ], + [ + [0.026579707860946655, -0.5960038900375366, 0.8041927814483643], + [0.7616859078407288, -0.7796168327331543, -0.554863452911377], + [-0.6201406717300415, 0.011632002890110016, -0.003877401351928711] + ], + [ + [0.011638939380645752, -1.5284671783447266, 0.8422683477401733], + [0.5824505686759949, -0.31925106048583984, -0.3557700216770172], + [-0.5940449237823486, 0.997965931892395, 0.38896265625953674] + ] + ], + [ + [ + [-0.32166922092437744, -0.35466498136520386, 0.4611860513687134], + [0.48506858944892883, 0.2462344467639923, 
0.0006439238786697388], + [1.2781717777252197, 0.22181449830532074, -0.3764876127243042] + ], + [ + [-1.163193941116333, -0.4850097894668579, -0.803731381893158], + [0.9359619617462158, 0.11395159363746643, -0.9356815218925476], + [0.3402889370918274, 0.35703301429748535, 0.3925244212150574] + ], + [ + [-0.3097846508026123, -0.4295724332332611, -0.8227052688598633], + [-0.3714708089828491, -0.1196538507938385, 0.4053196310997009], + [-1.0225160121917725, 0.598682701587677, 0.38496536016464233] + ] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [ + [-0.24815484881401062, -0.24815484881401062, 0.05464163422584534, 0.30279648303985596, 0.30279648303985596], + [-0.24815484881401062, -0.24815484881401062, 0.05464163422584534, 0.30279648303985596, 0.30279648303985596], + [-0.4366999566555023, -0.4366999566555023, -0.1554678976535797, 0.2812320590019226, 0.2812320590019226], + [-0.1885451078414917, -0.1885451078414917, -0.21010953187942505, -0.02156442403793335, -0.02156442403793335], + [-0.1885451078414917, -0.1885451078414917, -0.21010953187942505, -0.02156442403793335, -0.02156442403793335] + ], + [ + [-0.24815484881401062, -0.24815484881401062, 0.05464163422584534, 0.30279648303985596, 0.30279648303985596], + [-0.24815484881401062, -0.24815484881401062, 0.05464163422584534, 0.30279648303985596, 0.30279648303985596], + [-0.4366999566555023, -0.4366999566555023, -0.1554678976535797, 0.2812320590019226, 0.2812320590019226], + [-0.1885451078414917, -0.1885451078414917, -0.21010953187942505, -0.02156442403793335, -0.02156442403793335], + [-0.1885451078414917, -0.1885451078414917, -0.21010953187942505, -0.02156442403793335, -0.02156442403793335] + ], + [ + [-0.31137192249298096, -0.31137192249298096, 0.09441637992858887, 0.4057883024215698, 0.4057883024215698], + [-0.31137192249298096, -0.31137192249298096, 0.09441637992858887, 0.4057883024215698, 0.4057883024215698], + [-0.8061172962188721, -0.8061172962188721, -0.3164806663990021, 0.4896366000175476, 
0.4896366000175476], + [-0.4947453439235687, -0.4947453439235687, -0.41089701652526855, 0.08384829759597778, 0.08384829759597778], + [-0.4947453439235687, -0.4947453439235687, -0.41089701652526855, 0.08384829759597778, 0.08384829759597778] + ], + [ + [-0.06321707367897034, -0.06321707367897034, 0.03977474570274353, 0.10299181938171387, 0.10299181938171387], + [-0.06321707367897034, -0.06321707367897034, 0.03977474570274353, 0.10299181938171387, 0.10299181938171387], + [-0.36941730976104736, -0.36941730976104736, -0.16101276874542236, 0.208404541015625, 0.208404541015625], + [-0.306200236082077, -0.306200236082077, -0.2007875144481659, 0.10541272163391113, 0.10541272163391113], + [-0.306200236082077, -0.306200236082077, -0.2007875144481659, 0.10541272163391113, 0.10541272163391113] + ], + [ + [-0.06321707367897034, -0.06321707367897034, 0.03977474570274353, 0.10299181938171387, 0.10299181938171387], + [-0.06321707367897034, -0.06321707367897034, 0.03977474570274353, 0.10299181938171387, 0.10299181938171387], + [-0.36941730976104736, -0.36941730976104736, -0.16101276874542236, 0.208404541015625, 0.208404541015625], + [-0.306200236082077, -0.306200236082077, -0.2007875144481659, 0.10541272163391113, 0.10541272163391113], + [-0.306200236082077, -0.306200236082077, -0.2007875144481659, 0.10541272163391113, 0.10541272163391113] + ] + ], + [ + [ + [-0.03961661458015442, -0.03961661458015442, -0.0017006993293762207, 0.0379159152507782, 0.0379159152507782], + [-0.03961661458015442, -0.03961661458015442, -0.0017006993293762207, 0.0379159152507782, 0.0379159152507782], + [-0.04381370544433594, -0.04381370544433594, -0.05668282508850098, -0.012869119644165039, -0.012869119644165039], + [-0.0041970908641815186, -0.0041970908641815186, -0.054982125759124756, -0.05078503489494324, -0.05078503489494324], + [-0.0041970908641815186, -0.0041970908641815186, -0.054982125759124756, -0.05078503489494324, -0.05078503489494324] + ], + [ + [-0.03961661458015442, -0.03961661458015442, 
-0.0017006993293762207, 0.0379159152507782, 0.0379159152507782], + [-0.03961661458015442, -0.03961661458015442, -0.0017006993293762207, 0.0379159152507782, 0.0379159152507782], + [-0.04381370544433594, -0.04381370544433594, -0.05668282508850098, -0.012869119644165039, -0.012869119644165039], + [-0.0041970908641815186, -0.0041970908641815186, -0.054982125759124756, -0.05078503489494324, -0.05078503489494324], + [-0.0041970908641815186, -0.0041970908641815186, -0.054982125759124756, -0.05078503489494324, -0.05078503489494324] + ], + [ + [-0.057275354862213135, -0.057275354862213135, 0.4386483430862427, 0.4959236979484558, 0.4959236979484558], + [-0.057275354862213135, -0.057275354862213135, 0.4386483430862427, 0.4959236979484558, 0.4959236979484558], + [0.27879536151885986, 0.27879536151885986, 0.5063198804855347, 0.2275245189666748, 0.2275245189666748], + [0.336070716381073, 0.336070716381073, 0.06767153739929199, -0.268399178981781, -0.268399178981781], + [0.336070716381073, 0.336070716381073, 0.06767153739929199, -0.268399178981781, -0.268399178981781] + ], + [ + [-0.017658740282058716, -0.017658740282058716, 0.4403490424156189, 0.4580077826976776, 0.4580077826976776], + [-0.017658740282058716, -0.017658740282058716, 0.4403490424156189, 0.4580077826976776, 0.4580077826976776], + [0.3226090669631958, 0.3226090669631958, 0.5630027055740356, 0.24039363861083984, 0.24039363861083984], + [0.3402678072452545, 0.3402678072452545, 0.12265366315841675, -0.21761414408683777, -0.21761414408683777], + [0.3402678072452545, 0.3402678072452545, 0.12265366315841675, -0.21761414408683777, -0.21761414408683777] + ], + [ + [-0.017658740282058716, -0.017658740282058716, 0.4403490424156189, 0.4580077826976776, 0.4580077826976776], + [-0.017658740282058716, -0.017658740282058716, 0.4403490424156189, 0.4580077826976776, 0.4580077826976776], + [0.3226090669631958, 0.3226090669631958, 0.5630027055740356, 0.24039363861083984, 0.24039363861083984], + [0.3402678072452545, 0.3402678072452545, 
0.12265366315841675, -0.21761414408683777, -0.21761414408683777], + [0.3402678072452545, 0.3402678072452545, 0.12265366315841675, -0.21761414408683777, -0.21761414408683777] + ] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [ + [-4.159571647644043, -11.871390342712402], + [-4.023550033569336, 4.073703765869141] + ], + [ + [-4.225783824920654, -7.7821879386901855], + [-11.579157829284668, -0.1227450966835022] + ] + ], + [ + [ + [1.4379500150680542, 2.76035475730896], + [-0.39321398735046387, -3.127079963684082] + ], + [ + [-3.4264943599700928, -8.820097923278809], + [7.565301895141602, 2.71728253364563] + ] + ] + ], + [ + [ + [ + [-4.159571647644043, -11.871390342712402], + [-4.023550033569336, 4.073703765869141] + ], + [ + [-4.225783824920654, -7.7821879386901855], + [-11.579157829284668, -0.1227450966835022] + ] + ], + [ + [ + [1.4379500150680542, 2.76035475730896], + [-0.39321398735046387, -3.127079963684082] + ], + [ + [-3.4264943599700928, -8.820097923278809], + [7.565301895141602, 2.71728253364563] + ] + ] + ] + ], + "bias": null, + "expected_grad_bias": null + }, + { + "test_name": "conv3d_groups", + "conv_type": "Conv3d", + "in_channels": 4, + "out_channels": 4, + "kernel_size": 3, + "stride": 1, + "padding": 1, + "dilation": 1, + "groups": 2, + "has_bias": true, + "input": [ + [ + [ + [ + [0.6901775598526001, -0.38219600915908813, 0.4878943860530853, 0.04518410563468933], + [1.9969353675842285, -0.6955253481864929, -0.9111350774765015, -0.7280716300010681], + [2.3521711826324463, 0.41813287138938904, -0.1308819055557251, -0.4394487142562866], + [1.6976182460784912, 0.813541829586029, -0.29035913944244385, -1.5638561248779297] + ], + [ + [-0.4955300986766815, -1.451595664024353, 1.1304244995117188, -0.23605898022651672], + [-1.4099090099334717, 0.033967722207307816, -1.116248369216919, 0.009235713630914688], + [0.6194980144500732, 1.2993793487548828, 0.37411054968833923, 0.29060807824134827], + [-0.14929574728012085, 0.16823826730251312, 
-0.5334858298301697, -0.10448182374238968] + ], + [ + [0.30514323711395264, -0.0909213200211525, -1.1586445569992065, -0.7481539845466614], + [1.1121810674667358, -0.4421563148498535, -0.576346218585968, -0.9974139928817749], + [-1.2427209615707397, 0.10128959268331528, 0.19372011721134186, 0.24058400094509125], + [0.9057638049125671, 0.11222076416015625, 0.05109323561191559, -0.5710897445678711] + ], + [ + [1.0549991130828857, -0.04648401960730553, 1.6230204105377197, 0.9345911145210266], + [0.6804553866386414, 2.2367258071899414, 0.12645944952964783, 1.2714524269104004], + [-1.208367109298706, -1.4461109638214111, -0.6192750930786133, -0.40478718280792236], + [0.45292869210243225, 0.40945965051651, 2.1360230445861816, -1.1599798202514648] + ] + ], + [ + [ + [2.428079843521118, 1.1440294981002808, -0.101145900785923, 0.09917446225881577], + [0.3814586102962494, 1.6723263263702393, 1.1702476739883423, -0.4936075210571289], + [-0.39397287368774414, -0.4608776867389679, 0.51866215467453, 0.7784189581871033], + [-1.5908970832824707, 1.9877948760986328, -0.6688686013221741, 0.3630669116973877] + ], + [ + [-0.776123583316803, 0.7065202593803406, 0.020995743572711945, -0.7148111462593079], + [-0.21817894279956818, 0.6545652151107788, 0.5159794688224792, 1.2866613864898682], + [0.869994044303894, 0.17624177038669586, 0.10479936003684998, 1.1782784461975098], + [-0.8539279699325562, 0.18701297044754028, -2.0761170387268066, -0.37549611926078796] + ], + [ + [-1.2304182052612305, -0.026510445401072502, 0.8861871957778931, -0.7287083268165588], + [1.380319356918335, 0.6684188842773438, -1.6208312511444092, -0.9440155625343323], + [-0.7828742265701294, -0.08022477477788925, 0.9383800625801086, -0.4210285246372223], + [-0.49148616194725037, -0.9306524395942688, -0.3210572898387909, 0.5819711089134216] + ], + [ + [-1.4208135604858398, 0.2507438659667969, -0.5702037215232849, -0.05394743010401726], + [-0.7306843400001526, -1.0346637964248657, 0.47165822982788086, 
-0.31658414006233215], + [-0.6593214273452759, 0.24809309840202332, 0.7311660647392273, 2.3288936614990234], + [-1.06203031539917, -0.5385692119598389, 0.05127298831939697, -0.5355269908905029] + ] + ], + [ + [ + [1.0171878337860107, -0.548503041267395, 1.6083859205245972, 0.5863268971443176], + [0.15758942067623138, -0.9828112125396729, 0.5712791085243225, -0.6984293460845947], + [0.4230518639087677, 0.5434484481811523, -1.2116377353668213, 0.1757367104291916], + [0.25499454140663147, 0.8070029616355896, 0.4390753507614136, -0.4656812846660614] + ], + [ + [0.7719241976737976, 2.0046799182891846, -0.05154263228178024, -0.09019286930561066], + [0.9585400819778442, -1.2275644540786743, -0.8988152146339417, -2.1179587841033936], + [0.6541568636894226, -0.43890517950057983, -0.05930287018418312, -1.202606201171875], + [-1.2769453525543213, -0.3968345820903778, -0.311004638671875, 1.5671075582504272] + ], + [ + [0.23923389613628387, 1.4039400815963745, -2.0282845497131348, -1.0275602340698242], + [0.9761411547660828, -0.06743653863668442, 0.15256273746490479, -0.5650336742401123], + [0.19450226426124573, -0.7134578824043274, 0.16818134486675262, -1.1888638734817505], + [-0.37875133752822876, -0.321153849363327, 1.1533565521240234, -1.292052984237671] + ], + [ + [1.7734715938568115, 1.655669927597046, -0.08070362359285355, -0.303115576505661], + [0.3240552544593811, 1.0401465892791748, 0.4352516531944275, 0.1256813257932663], + [-0.12040789425373077, 0.16437974572181702, 0.7806977033615112, 1.228264331817627], + [0.4453698992729187, -0.3023916184902191, 0.37783581018447876, 1.7510546445846558] + ] + ], + [ + [ + [-0.3600947856903076, -1.0252388715744019, 0.37429946660995483, -0.3791492283344269], + [2.650177001953125, -1.6031533479690552, 0.19602227210998535, 0.40920931100845337], + [-0.21724168956279755, -0.5731971859931946, 0.37175968289375305, -0.4610559642314911], + [2.174776315689087, 0.021914366632699966, 1.5658904314041138, 1.920691728591919] + ], + [ + 
[0.6814432144165039, 0.772460401058197, 0.007647526450455189, -0.25153985619544983], + [0.84389328956604, -0.2621386647224426, 0.6312791705131531, 0.1832093447446823], + [0.011438353918492794, 0.0011970907216891646, 0.672935426235199, -0.6712173819541931], + [-0.2299986481666565, 0.13960936665534973, -2.052098274230957, 1.4737012386322021] + ], + [ + [-0.9974878430366516, -0.9902269244194031, -0.1406998336315155, 0.8166614174842834], + [1.0789191722869873, 1.7252999544143677, -0.2263449877500534, -0.004874890204519033], + [-0.11877616494894028, -0.34278497099876404, -0.08240477740764618, 0.8542949557304382], + [-0.9149259924888611, 0.5388334393501282, 0.43459662795066833, -0.927284836769104] + ], + [ + [-0.4312492907047272, 0.4804547131061554, -0.0037577631883323193, -0.23722761869430542], + [0.1753881573677063, -0.3416663408279419, -1.1473757028579712, -0.0227875467389822], + [0.8689031600952148, -0.6073640584945679, 0.11616495996713638, 1.9142365455627441], + [-1.1450425386428833, 1.60905921459198, -0.6460893750190735, 0.5232442021369934] + ] + ] + ] + ], + "weight": [ + [ + [ + [ + [0.018023155629634857, -0.10342481732368469, -0.02519121766090393], + [-0.06361400336027145, 0.024651339277625084, -0.09643067419528961], + [0.08391905575990677, -0.13368600606918335, -0.07455432415008545] + ], + [ + [0.02808072417974472, -0.13384917378425598, 0.03354187309741974], + [-0.05091074854135513, -0.06872890144586563, 0.0975482165813446], + [-0.04394025728106499, 0.03216959536075592, -0.05254532769322395] + ], + [ + [0.06366001069545746, 0.12392594665288925, 0.1306309849023819], + [-0.08231638371944427, -0.1352720409631729, -0.09515999257564545], + [0.038335442543029785, 0.054630495607852936, 0.07147255539894104] + ] + ], + [ + [ + [0.059092968702316284, 0.13238413631916046, -0.10319491475820541], + [-0.10668620467185974, -0.042614519596099854, 0.10562329739332199], + [-0.10449402779340744, 0.10202737897634506, 0.022643063217401505] + ], + [ + [0.0841764360666275, 
0.13482370972633362, 0.13245315849781036], + [0.13064035773277283, -0.033394865691661835, -0.05806885287165642], + [0.057771969586610794, 0.11604811251163483, 0.003388179698958993] + ], + [ + [-0.03221295773983002, -0.02200985699892044, -0.048572737723588943], + [0.06534195691347122, -0.13534505665302277, 0.11579547822475433], + [-0.004997386131435633, -0.08249450474977493, -0.04854116961359978] + ] + ] + ], + [ + [ + [ + [-0.08353452384471893, -0.11824506521224976, 0.013692961074411869], + [-0.030905533581972122, -0.06108633428812027, -0.023175803944468498], + [-0.018762487918138504, 0.05995330214500427, -0.1100788563489914] + ], + [ + [-0.003190705319866538, -0.001857181079685688, -0.11124534159898758], + [0.12544086575508118, -0.06451167911291122, 0.10796535015106201], + [0.10430539399385452, 0.003685534931719303, 0.10132654011249542] + ], + [ + [-0.10240200161933899, -0.014958562329411507, -0.11225347965955734], + [-0.08284203708171844, 0.042994432151317596, 0.10140262544155121], + [-0.08075521141290665, -0.05498553812503815, -0.07630179822444916] + ] + ], + [ + [ + [-0.011823110282421112, -0.11303313821554184, -0.10771020501852036], + [-0.029605654999613762, 0.13074901700019836, 0.02344232238829136], + [-0.05664527788758278, 0.0909726545214653, 0.1004367470741272] + ], + [ + [0.09300453215837479, 0.0012665909016504884, -0.014742723666131496], + [0.08838989585638046, 0.07315170764923096, 0.1031828299164772], + [0.09800897538661957, -0.03862697258591652, 0.11525991559028625] + ], + [ + [0.026624606922268867, 0.13548168540000916, 0.13537754118442535], + [0.1141292005777359, 0.09058520197868347, 0.10684870183467865], + [-0.06396123766899109, -0.05297635495662689, -0.03171420097351074] + ] + ] + ], + [ + [ + [ + [-0.06965107470750809, 0.09998120367527008, 0.01272142119705677], + [-0.03446612507104874, -0.06096961721777916, -0.12115637212991714], + [0.009764074347913265, 0.11962190270423889, 0.08947835117578506] + ], + [ + [0.10785743594169617, 0.09297690540552139, 
0.05726354569196701], + [-0.003918682225048542, -0.10067062079906464, 0.10005360841751099], + [-0.09575940668582916, -0.08183123916387558, 0.054605741053819656] + ], + [ + [0.07473588734865189, -0.07081582397222519, 0.08612146228551865], + [-0.12735553085803986, 0.03242866322398186, 0.0600760243833065], + [0.025276677682995796, -0.07657472044229507, -0.09190274775028229] + ] + ], + [ + [ + [0.10876334458589554, 0.029979806393384933, -0.05846203491091728], + [0.13107377290725708, -0.028404781594872475, -0.025164403021335602], + [0.03502681851387024, 0.1132957935333252, -0.06057724729180336] + ], + [ + [0.0006860585417598486, -0.06109375134110451, -0.061637554317712784], + [-0.018549779430031776, -0.0001401933841407299, 0.0729675218462944], + [-0.1016455665230751, 0.07896950840950012, 0.10631748288869858] + ], + [ + [-0.015513543970882893, -0.012190010398626328, 0.03355866298079491], + [-0.12458645552396774, 0.10013227164745331, -0.0238055232912302], + [0.005472911521792412, 0.06451578438282013, 0.05666399747133255] + ] + ] + ], + [ + [ + [ + [-0.06296601891517639, -0.0999550074338913, -0.13127993047237396], + [-0.11363308876752853, -0.028607089072465897, -0.10564422607421875], + [-0.01374067086726427, -0.07189460843801498, 0.07710906863212585] + ], + [ + [0.09308194369077682, 0.11512674391269684, -0.08923662453889847], + [-0.09603527188301086, 0.04720672219991684, -0.031076064333319664], + [0.10320258885622025, -0.12484192848205566, 0.12619450688362122] + ], + [ + [-0.09438951313495636, -0.10912685096263885, -0.01443044375628233], + [-0.060285359621047974, -0.08578546345233917, 0.07769076526165009], + [0.10509980469942093, 0.07576528936624527, 0.016610369086265564] + ] + ], + [ + [ + [-0.019582444801926613, -0.07424866408109665, 0.12970370054244995], + [-0.07411813735961914, -0.1193142682313919, 0.11545565724372864], + [-0.010974876582622528, 0.09143692255020142, -0.04516752436757088] + ], + [ + [0.105990931391716, -0.09596171975135803, 0.08189238607883453], + 
[0.1059148982167244, -0.0019025224028155208, 0.09024524688720703], + [0.004828982055187225, -0.12459451705217361, 0.059617094695568085] + ], + [ + [-0.10579071193933487, 0.004276611842215061, -0.12240993976593018], + [-0.06479141861200333, -0.040547553449869156, -0.05288422852754593], + [-0.1340492218732834, -0.06759043782949448, -0.12609083950519562] + ] + ] + ] + ], + "expected_output": [ + [ + [ + [ + [0.10836861282587051, 0.1434028148651123, 0.07617371529340744, -0.24530893564224243], + [0.022334538400173187, 0.16089153289794922, 0.5011749267578125, 0.2169124186038971], + [-0.8152617812156677, -0.13277244567871094, -0.027908481657505035, -0.28777015209198], + [-0.2681388258934021, -0.6095927953720093, 0.5417340397834778, 0.11647675186395645] + ], + [ + [-0.3600711524486542, 0.36616450548171997, 0.11842126399278641, 0.2873610258102417], + [-0.11809677630662918, 0.16681700944900513, -0.2018146812915802, 0.1676146686077118], + [-0.24849003553390503, 0.856722891330719, 0.012479536235332489, 0.03260960429906845], + [-0.2794792354106903, -0.4788385033607483, 0.2933104634284973, 0.008262306451797485] + ], + [ + [0.9250420928001404, -0.5294893980026245, -0.08883032947778702, -0.3321789503097534], + [-1.1114075183868408, 0.12585216760635376, -0.11730637401342392, -0.24465978145599365], + [0.9605368971824646, 0.6088504195213318, 0.36330196261405945, 0.1861497461795807], + [-0.3596881628036499, -0.9545287489891052, -1.0544847249984741, 0.1572037935256958] + ], + [ + [-0.23569238185882568, 0.10663621872663498, -0.4488075077533722, -0.15766653418540955], + [-0.2282351702451706, -0.9508301019668579, 0.07240805774927139, 0.137204110622406], + [-0.7541200518608093, -0.13448819518089294, -0.03534723073244095, -0.4396282732486725], + [-0.01624389737844467, -0.05975595861673355, 0.38338232040405273, 0.26456719636917114] + ] + ], + [ + [ + [0.0914262980222702, 0.7680975794792175, -0.09522223472595215, -0.2208048701286316], + [-0.12967567145824432, 0.6833475232124329, 
0.15317025780677795, -0.19670507311820984], + [0.4770544171333313, 0.8055360317230225, 0.8930045962333679, 0.5797280669212341], + [-0.17343679070472717, -0.5099503993988037, -0.27266472578048706, -0.16489577293395996] + ], + [ + [0.3155290484428406, -0.1976332664489746, -0.19844186305999756, 0.06969204545021057], + [-0.06693869829177856, -0.0734449028968811, 0.7247299551963806, -0.47656312584877014], + [-0.32607942819595337, -0.5905663967132568, 0.4287152886390686, 0.08322885632514954], + [-0.9638822078704834, -0.5567367672920227, -0.6854769587516785, -0.3250684142112732] + ], + [ + [-0.5361930131912231, -0.014801502227783203, -0.37280017137527466, -0.6570376753807068], + [0.17725735902786255, 0.01413118839263916, -0.40995731949806213, -0.05875888466835022], + [-0.7748088836669922, -0.5375179648399353, -0.32948052883148193, 0.058018386363983154], + [-0.5657485723495483, -0.1449487805366516, -0.35549309849739075, -0.02246960997581482] + ], + [ + [-0.018765531480312347, 0.036078572273254395, 0.029878392815589905, -0.05353579670190811], + [-0.11946308612823486, -0.7185779213905334, 0.1332222819328308, -0.1480204164981842], + [-0.9427189826965332, -0.14275342226028442, 0.18042618036270142, 0.5569607019424438], + [0.006041407585144043, -0.13803455233573914, -0.48807016015052795, 0.356815904378891] + ] + ], + [ + [ + [-0.06383644789457321, 0.06947135925292969, -0.026793554425239563, -0.003535926342010498], + [-0.0698959082365036, -0.27908724546432495, 0.40270525217056274, 0.30678045749664307], + [-0.08524920791387558, -0.40588659048080444, -0.22960542142391205, -0.2935093641281128], + [-0.07879580557346344, 0.31575489044189453, -0.3577684164047241, 0.42365896701812744] + ], + [ + [0.47740018367767334, -0.7280412316322327, -0.48286008834838867, 0.4542393088340759], + [0.23268620669841766, -0.0064995065331459045, -0.013882175087928772, 0.14692038297653198], + [0.2875715494155884, 0.15558455884456635, -0.5822675824165344, 0.35174304246902466], + [-0.26513993740081787, 
0.2030189484357834, -0.3075501322746277, -0.2294529527425766] + ], + [ + [-0.0875728577375412, -1.096677541732788, -0.7226636409759521, -0.14330235123634338], + [0.41013598442077637, 0.32467103004455566, -0.11465035378932953, -0.03907476365566254], + [-0.19876301288604736, 0.12371616065502167, -0.5176448225975037, 0.33613336086273193], + [-0.00326479971408844, 0.42454051971435547, -0.6129534244537354, -0.47633981704711914] + ], + [ + [-0.05132363736629486, -0.19670501351356506, -0.2644253969192505, 0.03174002468585968], + [0.15850017964839935, -0.08815433084964752, -0.023564592003822327, -0.1290639042854309], + [-0.062078848481178284, 0.5878133177757263, 0.7137229442596436, -0.5141631960868835], + [0.04793590307235718, -0.31486088037490845, 0.24411986768245697, -0.15430687367916107] + ] + ], + [ + [ + [-0.5774490833282471, -0.25177302956581116, -0.755704939365387, -0.3521902561187744], + [-0.3783379793167114, -0.4263206422328949, -0.2581055164337158, 0.16536319255828857], + [-0.8054671287536621, 0.3439699113368988, -0.3840625584125519, 0.5622360706329346], + [-0.056073784828186035, 0.7216264009475708, 0.17985312640666962, 0.0789409726858139] + ], + [ + [-0.23849238455295563, -0.18634046614170074, -0.8208248615264893, 0.20926734805107117], + [-0.8225386738777161, -0.04978513717651367, 0.05448875576257706, 0.23044981062412262], + [-0.2830033302307129, -0.031862758100032806, 0.24454952776432037, -0.20136071741580963], + [-0.42660996317863464, 0.26947957277297974, -0.1839623898267746, -0.2741720378398895] + ], + [ + [-0.5711663961410522, -0.4550422132015228, -0.5188841819763184, 0.6487977504730225], + [-0.6336983442306519, -0.18077149987220764, -0.08758194744586945, -0.1853662133216858], + [0.22357551753520966, 0.5520566701889038, -0.08997183293104172, 0.6255630850791931], + [0.1833794265985489, -0.1741248220205307, 0.2436821609735489, -0.6031883358955383] + ], + [ + [-0.1105915755033493, 0.24855990707874298, 0.15290819108486176, 0.20295776426792145], + 
[-0.3275313079357147, 0.09667418897151947, 0.4353748857975006, -0.16768665611743927], + [-0.059414178133010864, -0.052108630537986755, 0.8092654347419739, -0.3929613530635834], + [0.2403830736875534, -0.1458280086517334, 0.5066218972206116, 0.019905833527445793] + ] + ] + ] + ], + "expected_grad_input": [ + [ + [ + [ + [-0.5876625776290894, -0.5909571647644043, -0.5909571647644043, -0.5303464531898499], + [-0.5000184178352356, -0.6391649842262268, -0.6391649842262268, -0.7040759921073914], + [-0.5000184178352356, -0.6391649842262268, -0.6391649842262268, -0.7040759921073914], + [-0.10202086716890335, -0.15196572244167328, -0.15196572244167328, -0.25749802589416504] + ], + [ + [-0.7748732566833496, -0.753547728061676, -0.753547728061676, -0.489036500453949], + [-0.7300039529800415, -0.8493596315383911, -0.8493596315383911, -0.6679503917694092], + [-0.7300039529800415, -0.8493596315383911, -0.8493596315383911, -0.6679503917694092], + [-0.40223175287246704, -0.4507632255554199, -0.4507632255554199, -0.34871727228164673] + ], + [ + [-0.7748732566833496, -0.753547728061676, -0.753547728061676, -0.489036500453949], + [-0.7300039529800415, -0.8493596315383911, -0.8493596315383911, -0.6679503917694092], + [-0.7300039529800415, -0.8493596315383911, -0.8493596315383911, -0.6679503917694092], + [-0.40223175287246704, -0.4507632255554199, -0.4507632255554199, -0.34871727228164673] + ], + [ + [-0.3567374348640442, -0.2043071985244751, -0.2043071985244751, -0.09982690960168839], + [-0.3032919764518738, -0.10690975189208984, -0.10690975189208984, -0.020374871790409088], + [-0.3032919764518738, -0.10690975189208984, -0.10690975189208984, -0.020374871790409088], + [-0.26270103454589844, -0.006992869079113007, -0.006992869079113007, 0.0656900703907013] + ] + ], + [ + [ + [0.5905218720436096, 0.6715068817138672, 0.6715068817138672, 0.3643175959587097], + [0.8555846810340881, 1.178297519683838, 1.178297519683838, 0.8764665722846985], + [0.8555846810340881, 1.178297519683838, 
1.178297519683838, 0.8764665722846985], + [0.4756925106048584, 0.8916000127792358, 0.8916000127792358, 0.8142200708389282] + ], + [ + [0.8331167101860046, 1.22355055809021, 1.22355055809021, 0.7424785494804382], + [0.893750011920929, 1.4456565380096436, 1.4456565380096436, 1.038901448249817], + [0.893750011920929, 1.4456565380096436, 1.4456565380096436, 1.038901448249817], + [0.4059743881225586, 0.9642707109451294, 0.9642707109451294, 0.7763781547546387] + ], + [ + [0.8331167101860046, 1.22355055809021, 1.22355055809021, 0.7424785494804382], + [0.893750011920929, 1.4456565380096436, 1.4456565380096436, 1.038901448249817], + [0.893750011920929, 1.4456565380096436, 1.4456565380096436, 1.038901448249817], + [0.4059743881225586, 0.9642707109451294, 0.9642707109451294, 0.7763781547546387] + ], + [ + [0.8146531581878662, 1.286926507949829, 1.286926507949829, 0.7168325185775757], + [0.8434257507324219, 1.3540918827056885, 1.3540918827056885, 0.6971754431724548], + [0.8434257507324219, 1.3540918827056885, 1.3540918827056885, 0.6971754431724548], + [0.4222710132598877, 0.7284219264984131, 0.7284219264984131, 0.24309813976287842] + ] + ], + [ + [ + [-0.11464162170886993, -0.42299625277519226, -0.42299625277519226, -0.24326539039611816], + [-0.27012091875076294, -0.23108789324760437, -0.23108789324760437, -0.054823607206344604], + [-0.27012091875076294, -0.23108789324760437, -0.23108789324760437, -0.054823607206344604], + [-0.5465730428695679, -0.3570084571838379, -0.3570084571838379, -0.11242188513278961] + ], + [ + [-0.5552356243133545, -0.6541324853897095, -0.6541324853897095, -0.2671070396900177], + [-0.5811477899551392, -0.40794938802719116, -0.40794938802719116, -0.15476706624031067], + [-0.5811477899551392, -0.40794938802719116, -0.40794938802719116, -0.15476706624031067], + [-0.6580036878585815, -0.4059647023677826, -0.4059647023677826, -0.10411368310451508] + ], + [ + [-0.5552356243133545, -0.6541324853897095, -0.6541324853897095, -0.2671070396900177], + 
[-0.5811477899551392, -0.40794938802719116, -0.40794938802719116, -0.15476706624031067], + [-0.5811477899551392, -0.40794938802719116, -0.40794938802719116, -0.15476706624031067], + [-0.6580036878585815, -0.4059647023677826, -0.4059647023677826, -0.10411368310451508] + ], + [ + [-0.18496879935264587, 0.061493486166000366, 0.061493486166000366, 0.1678025722503662], + [-0.2546316683292389, 0.09733840823173523, 0.09733840823173523, 0.06582783162593842], + [-0.2546316683292389, 0.09733840823173523, 0.09733840823173523, 0.06582783162593842], + [-0.46407845616340637, -0.15182627737522125, -0.15182627737522125, -0.0020511001348495483] + ] + ], + [ + [ + [-0.010907456278800964, 0.3340930938720703, 0.3340930938720703, -0.006085563451051712], + [0.07543560862541199, 0.4806259274482727, 0.4806259274482727, 0.21321193873882294], + [0.07543560862541199, 0.4806259274482727, 0.4806259274482727, 0.21321193873882294], + [0.08090204000473022, 0.3945958912372589, 0.3945958912372589, 0.3230397701263428] + ], + [ + [-0.26991820335388184, -0.09045873582363129, -0.09045873582363129, -0.11995527148246765], + [-0.31522613763809204, -0.1450037807226181, -0.1450037807226181, 0.026840750128030777], + [-0.31522613763809204, -0.1450037807226181, -0.1450037807226181, 0.026840750128030777], + [-0.1805420219898224, -0.012964814901351929, -0.012964814901351929, 0.23343324661254883] + ], + [ + [-0.26991820335388184, -0.09045873582363129, -0.09045873582363129, -0.11995527148246765], + [-0.31522613763809204, -0.1450037807226181, -0.1450037807226181, 0.026840750128030777], + [-0.31522613763809204, -0.1450037807226181, -0.1450037807226181, 0.026840750128030777], + [-0.1805420219898224, -0.012964814901351929, -0.012964814901351929, 0.23343324661254883] + ], + [ + [-0.2240668535232544, -0.20614027976989746, -0.20614027976989746, -0.08950027078390121], + [-0.49815940856933594, -0.38372519612312317, -0.38372519612312317, -0.04169219732284546], + [-0.49815940856933594, -0.38372519612312317, 
-0.38372519612312317, -0.04169219732284546], + [-0.3185632526874542, -0.13553252816200256, -0.13553252816200256, 0.19187310338020325] + ] + ] + ] + ], + "expected_grad_weight": [ + [ + [ + [ + [1.0112146139144897, -1.5523207187652588, -5.480266571044922], + [3.7865498065948486, -1.0164130926132202, -7.3984456062316895], + [4.751798629760742, 0.8878644704818726, -4.994378089904785] + ], + [ + [3.412637710571289, 2.650357961654663, -1.8046751022338867], + [9.186384201049805, 5.024697303771973, -2.3373513221740723], + [7.520097255706787, 3.3628487586975098, -2.444410800933838] + ], + [ + [-0.4129352569580078, -0.052878379821777344, 0.5313718318939209], + [3.1400108337402344, 1.6645159721374512, 1.0393695831298828], + [2.2695987224578857, 0.8437265157699585, 1.0831925868988037] + ] + ], + [ + [ + [8.54604721069336, 8.586409568786621, 6.928125381469727], + [3.787848472595215, 4.397752285003662, 5.675779819488525], + [0.7362345457077026, 2.690483570098877, 4.390048980712891] + ], + [ + [5.832021713256836, 7.830746173858643, 8.983281135559082], + [-0.47550344467163086, 1.5572357177734375, 6.708112716674805], + [-1.7868441343307495, 1.6441874504089355, 5.795788764953613] + ], + [ + [-0.5267853736877441, 1.0879532098770142, 4.65605354309082], + [-6.562339782714844, -5.276653289794922, 0.6988917589187622], + [-4.402717590332031, -1.619564414024353, 0.9286255836486816] + ] + ] + ], + [ + [ + [ + [1.0112146139144897, -1.5523207187652588, -5.480266571044922], + [3.7865498065948486, -1.0164130926132202, -7.3984456062316895], + [4.751798629760742, 0.8878644704818726, -4.994378089904785] + ], + [ + [3.412637710571289, 2.650357961654663, -1.8046751022338867], + [9.186384201049805, 5.024697303771973, -2.3373513221740723], + [7.520097255706787, 3.3628487586975098, -2.444410800933838] + ], + [ + [-0.4129352569580078, -0.052878379821777344, 0.5313718318939209], + [3.1400108337402344, 1.6645159721374512, 1.0393695831298828], + [2.2695987224578857, 0.8437265157699585, 1.0831925868988037] 
+ ] + ], + [ + [ + [8.54604721069336, 8.586409568786621, 6.928125381469727], + [3.787848472595215, 4.397752285003662, 5.675779819488525], + [0.7362345457077026, 2.690483570098877, 4.390048980712891] + ], + [ + [5.832021713256836, 7.830746173858643, 8.983281135559082], + [-0.47550344467163086, 1.5572357177734375, 6.708112716674805], + [-1.7868441343307495, 1.6441874504089355, 5.795788764953613] + ], + [ + [-0.5267853736877441, 1.0879532098770142, 4.65605354309082], + [-6.562339782714844, -5.276653289794922, 0.6988917589187622], + [-4.402717590332031, -1.619564414024353, 0.9286255836486816] + ] + ] + ], + [ + [ + [ + [3.6165435314178467, -2.512037754058838, -7.904365062713623], + [3.5862832069396973, -2.732924699783325, -6.724549770355225], + [-0.8307381868362427, -6.6185197830200195, -8.581799507141113] + ], + [ + [9.589104652404785, 4.511353492736816, -2.858092784881592], + [10.079658508300781, 6.562335968017578, 0.14822149276733398], + [2.3141989707946777, -0.36858272552490234, -2.980879783630371] + ], + [ + [8.011114120483398, 2.8697285652160645, -2.901888847351074], + [7.0005950927734375, 3.885319232940674, -0.675971508026123], + [1.3122061491012573, -0.3822016716003418, -2.158862590789795] + ] + ], + [ + [ + [3.078982353210449, 3.574519634246826, 0.002249002456665039], + [4.757579803466797, 7.7202253341674805, 3.11810302734375], + [6.435477256774902, 9.212150573730469, 3.933889150619507] + ], + [ + [2.1884801387786865, 4.338238716125488, 0.15292620658874512], + [3.685004711151123, 8.825116157531738, 4.7549943923950195], + [5.317454814910889, 10.508821487426758, 5.331311225891113] + ], + [ + [2.375147581100464, 4.955902099609375, 2.8434300422668457], + [0.10909116268157959, 3.7595064640045166, 3.9370014667510986], + [0.7305070161819458, 4.053028583526611, 3.483229637145996] + ] + ] + ], + [ + [ + [ + [3.6165435314178467, -2.512037754058838, -7.904365062713623], + [3.5862832069396973, -2.732924699783325, -6.724549770355225], + [-0.8307381868362427, 
-6.6185197830200195, -8.581799507141113] + ], + [ + [9.589104652404785, 4.511353492736816, -2.858092784881592], + [10.079658508300781, 6.562335968017578, 0.14822149276733398], + [2.3141989707946777, -0.36858272552490234, -2.980879783630371] + ], + [ + [8.011114120483398, 2.8697285652160645, -2.901888847351074], + [7.0005950927734375, 3.885319232940674, -0.675971508026123], + [1.3122061491012573, -0.3822016716003418, -2.158862590789795] + ] + ], + [ + [ + [3.078982353210449, 3.574519634246826, 0.002249002456665039], + [4.757579803466797, 7.7202253341674805, 3.11810302734375], + [6.435477256774902, 9.212150573730469, 3.933889150619507] + ], + [ + [2.1884801387786865, 4.338238716125488, 0.15292620658874512], + [3.685004711151123, 8.825116157531738, 4.7549943923950195], + [5.317454814910889, 10.508821487426758, 5.331311225891113] + ], + [ + [2.375147581100464, 4.955902099609375, 2.8434300422668457], + [0.10909116268157959, 3.7595064640045166, 3.9370014667510986], + [0.7305070161819458, 4.053028583526611, 3.483229637145996] + ] + ] + ] + ], + "bias": [-0.11376721411943436, -0.13576680421829224, -0.07839275896549225, -0.030493957921862602], + "expected_grad_bias": [64.0, 64.0, 64.0, 64.0] + } + ], + "export": [ + { + "test_name": "linear_relu", + "model_type": "LinearReLU", + "input": [ + [-1.1228563785552979, -0.18632829189300537, 2.2082014083862305], + [-0.637997031211853, 0.46165722608566284, 0.2673508822917938] + ], + "weight": [ + [0.4414065182209015, 0.47920528054237366, -0.1352572739124298], + [0.5303604602813721, -0.1264995038509369, 0.11650390923023224] + ], + "bias": [-0.28108587861061096, 0.3390677869319916], + "in_features": 3, + "out_features": 2, + "expected_nodes": [ + { + "op": "placeholder", + "name": "p_0_weight", + "target": "p_0_weight", + "args": [], + "val_shape": [2, 3] + }, + { + "op": "placeholder", + "name": "p_0_bias", + "target": "p_0_bias", + "args": [], + "val_shape": [2] + }, + { + "op": "placeholder", + "name": "input", + "target": 
"input", + "args": [], + "val_shape": [2, 3] + }, + { + "op": "call_function", + "name": "linear", + "target": "aten.linear.default", + "args": ["input", "p_0_weight", "p_0_bias"], + "val_shape": [2, 2] + }, + { + "op": "call_function", + "name": "relu", + "target": "aten.relu.default", + "args": ["linear"], + "val_shape": [2, 2] + }, + { + "op": "output", + "name": "output", + "target": "output", + "args": [ + ["relu"] + ] + } + ], + "expected_input_specs": [ + { + "kind": "PARAMETER", + "name": "p_0_weight" + }, + { + "kind": "PARAMETER", + "name": "p_0_bias" + }, + { + "kind": "USER_INPUT", + "name": "input" + } + ], + "expected_output_specs": [ + { + "kind": "USER_OUTPUT", + "name": "relu" + } + ] + }, + { + "test_name": "two_layer", + "model_type": "TwoLayer", + "input": [ + [-0.331889808177948, -0.4784944951534271, -0.2630537450313568, -0.17855434119701385], + [-1.1858534812927246, -0.8860282301902771, -0.7149547338485718, 0.12802214920520782] + ], + "linear1_weight": [ + [0.3822692632675171, 0.4150039553642273, -0.11713624000549316, 0.45930564403533936], + [-0.10955178737640381, 0.10089534521102905, -0.24342751502990723, 0.2936413288116455], + [0.4407714605331421, -0.3668140769004822, 0.43459808826446533, 0.09357964992523193] + ], + "linear1_bias": [0.3694044351577759, 0.06771528720855713, 0.24109405279159546], + "linear2_weight": [ + [-0.08151666074991226, 0.44507113099098206, 0.08533751219511032], + [-0.26953014731407166, 0.14716561138629913, -0.2660064101219177] + ], + "linear2_bias": [-0.06770750880241394, -0.234495609998703], + "linear1_in": 4, + "linear1_out": 3, + "linear2_in": 3, + "linear2_out": 2, + "expected_nodes": [ + { + "op": "placeholder", + "name": "p_0_weight", + "target": "p_0_weight", + "args": [], + "val_shape": [3, 4] + }, + { + "op": "placeholder", + "name": "p_0_bias", + "target": "p_0_bias", + "args": [], + "val_shape": [3] + }, + { + "op": "placeholder", + "name": "p_2_weight", + "target": "p_2_weight", + "args": [], + "val_shape": 
[2, 3] + }, + { + "op": "placeholder", + "name": "p_2_bias", + "target": "p_2_bias", + "args": [], + "val_shape": [2] + }, + { + "op": "placeholder", + "name": "input", + "target": "input", + "args": [], + "val_shape": [2, 4] + }, + { + "op": "call_function", + "name": "linear", + "target": "aten.linear.default", + "args": ["input", "p_0_weight", "p_0_bias"], + "val_shape": [2, 3] + }, + { + "op": "call_function", + "name": "relu", + "target": "aten.relu.default", + "args": ["linear"], + "val_shape": [2, 3] + }, + { + "op": "call_function", + "name": "linear_1", + "target": "aten.linear.default", + "args": ["relu", "p_2_weight", "p_2_bias"], + "val_shape": [2, 2] + }, + { + "op": "call_function", + "name": "sigmoid", + "target": "aten.sigmoid.default", + "args": ["linear_1"], + "val_shape": [2, 2] + }, + { + "op": "output", + "name": "output", + "target": "output", + "args": [ + ["sigmoid"] + ] + } + ], + "expected_input_specs": [ + { + "kind": "PARAMETER", + "name": "p_0_weight" + }, + { + "kind": "PARAMETER", + "name": "p_0_bias" + }, + { + "kind": "PARAMETER", + "name": "p_2_weight" + }, + { + "kind": "PARAMETER", + "name": "p_2_bias" + }, + { + "kind": "USER_INPUT", + "name": "input" + } + ], + "expected_output_specs": [ + { + "kind": "USER_OUTPUT", + "name": "sigmoid" + } + ] + } + ], + "loss": [ + { + "test_name": "mse_1d", + "loss_type": "MSELoss", + "input": [-0.16030240058898926, -2.2161147594451904, -0.6857729554176331, -0.32950884103775024, -0.2747350335121155], + "target": [-1.255190372467041, -0.7812865376472473, 0.2293395847082138, -1.2753872871398926, -1.9245343208312988], + "expected_output": 1.5422934293746948, + "expected_grad_input": [0.43795520067214966, -0.5739313364028931, -0.3660450279712677, 0.3783513903617859, 0.6599197387695312] + }, + { + "test_name": "mse_2d", + "loss_type": "MSELoss", + "input": [ + [0.4335620105266571, 0.664082407951355, -0.43367648124694824, -0.42010796070098877], + [-0.9499658942222595, -1.0013846158981323, 
-0.771919310092926, 1.3434035778045654], + [0.955953061580658, -1.0110232830047607, -0.356810599565506, 0.7146782279014587] + ], + "target": [ + [-0.23975907266139984, 0.21629300713539124, 0.5484155416488647, -0.44147154688835144], + [1.5815293788909912, -0.19812652468681335, 0.9553706645965576, -1.0901628732681274], + [2.595151424407959, 2.750434637069702, 0.648761510848999, 0.44963163137435913] + ], + "expected_output": 2.9579389095306396, + "expected_grad_input": [ + [0.11222018301486969, 0.07463157176971436, -0.1636820137500763, 0.003560597775503993], + [-0.4219158887863159, -0.13387635350227356, -0.28788167238235474, 0.40559443831443787], + [-0.27319973707199097, -0.6269096732139587, -0.16759535670280457, 0.0441744327545166] + ] + }, + { + "test_name": "mse_3d", + "loss_type": "MSELoss", + "input": [ + [ + [-0.35762301087379456, -0.31650829315185547, 0.5886339545249939, -0.8904569149017334], + [0.4098125100135803, -1.457029104232788, -0.10233523696660995, -0.5991530418395996], + [-1.1298598051071167, -0.1360345333814621, 1.6354095935821533, 0.6547407507896423] + ], + [ + [0.5760045647621155, -0.36090990900993347, -0.06059038266539574, 0.07325476408004761], + [0.9254348874092102, -0.3753443658351898, 1.0330873727798462, -0.6866509318351746], + [0.6368136405944824, 0.2175532579421997, -0.04665505886077881, -1.433521032333374] + ] + ], + "target": [ + [ + [1.450609803199768, 0.2694815397262573, -0.21037597954273224, -0.7328027486801147], + [0.10429783165454865, 1.0414010286331177, -0.3997306227684021, -2.293334484100342], + [1.1179102659225464, -1.2955713272094727, 0.05027642846107483, -0.5854812264442444] + ], + [ + [-0.3899965286254883, 0.03581761196255684, 0.12058872729539871, -0.8056637048721313], + [0.20801466703414917, -1.158583402633667, -0.9636622667312622, -0.3749968111515045], + [0.8032528162002563, -0.5187733173370361, -1.5012763738632202, -1.9266542196273804] + ] + ], + "expected_output": 1.4189788103103638, + "expected_grad_input": [ + [ + 
[-0.15068607032299042, -0.0488324873149395, 0.06658416241407394, -0.013137847185134888], + [0.02545955777168274, -0.20820252597332, 0.024782948195934296, 0.1411817967891693], + [-0.18731418251991272, 0.09662806987762451, 0.13209444284439087, 0.10335183143615723] + ], + [ + [0.08050009608268738, -0.03306062892079353, -0.01509825885295868, 0.07324320822954178], + [0.05978501960635185, 0.06526991724967957, 0.16639581322669983, -0.025971177965402603], + [-0.013869931921362877, 0.06136054918169975, 0.12121844291687012, 0.04109443351626396] + ] + ] + }, + { + "test_name": "l1_1d", + "loss_type": "L1Loss", + "input": [-0.3083152174949646, 1.010894775390625, 0.3823634088039398, -1.002435326576233, 1.3410226106643677], + "target": [1.0322986841201782, 0.883126974105835, 0.8391237258911133, -1.2442442178726196, -0.4766048491001129], + "expected_output": 0.7969156503677368, + "expected_grad_input": [-0.20000000298023224, 0.20000000298023224, -0.20000000298023224, 0.20000000298023224, 0.20000000298023224] + }, + { + "test_name": "l1_2d", + "loss_type": "L1Loss", + "input": [ + [-0.8057987689971924, 0.46343186497688293, 0.7677170634269714, -1.102782130241394], + [1.7588211297988892, -1.8872032165527344, -2.220559597015381, 0.3306517004966736], + [0.48136675357818604, 1.4244316816329956, -0.31612974405288696, 0.08585795760154724] + ], + "target": [ + [0.9491400718688965, -0.10296741127967834, 2.5101442337036133, 0.1954755336046219], + [-0.014661739580333233, 0.2708866000175476, -0.20515739917755127, -0.6051543354988098], + [-0.5226790308952332, 0.9970949292182922, 0.3249934911727905, 0.14241407811641693] + ], + "expected_output": 1.1978222131729126, + "expected_grad_input": [ + [-0.0833333358168602, 0.0833333358168602, -0.0833333358168602, -0.0833333358168602], + [0.0833333358168602, -0.0833333358168602, -0.0833333358168602, 0.0833333358168602], + [0.0833333358168602, 0.0833333358168602, -0.0833333358168602, -0.0833333358168602] + ] + }, + { + "test_name": "l1_3d", + 
"loss_type": "L1Loss", + "input": [ + [ + [-0.01884661614894867, -0.685481607913971, 0.5635589957237244, -1.507175087928772], + [-1.610666036605835, -1.4790465831756592, 0.4322742819786072, -0.1250254064798355], + [0.05792269483208656, 0.8637403845787048, -0.5890002846717834, -1.0340019464492798] + ], + [ + [-0.2178654670715332, 0.7986540794372559, 0.9104702472686768, -0.08801817893981934], + [-0.036629438400268555, -0.4807589650154114, 0.3163014054298401, 0.3865722119808197], + [0.7336946129798889, 0.2510286271572113, 0.07699853926897049, -0.2063409984111786] + ] + ], + "target": [ + [ + [-0.047562163323163986, 0.5229573249816895, 0.9717304110527039, -0.27785417437553406], + [-0.6115978360176086, -0.5571882724761963, -0.9683470726013184, 0.8712791800498962], + [1.3595633506774902, 0.4334380030632019, -0.7171905636787415, 1.055369257926941] + ], + [ + [-1.4533969163894653, 0.46515071392059326, 0.37139150500297546, -0.004656785633414984], + [0.05607975274324417, 0.3781784772872925, 0.7051141262054443, -1.7236974239349365], + [-0.8434810638427734, 0.4351435601711273, 0.26588720083236694, -0.5870985388755798] + ] + ], + "expected_output": 0.7964643836021423, + "expected_grad_input": [ + [ + [0.0416666679084301, -0.0416666679084301, -0.0416666679084301, -0.0416666679084301], + [-0.0416666679084301, -0.0416666679084301, 0.0416666679084301, -0.0416666679084301], + [-0.0416666679084301, 0.0416666679084301, 0.0416666679084301, -0.0416666679084301] + ], + [ + [0.0416666679084301, 0.0416666679084301, 0.0416666679084301, -0.0416666679084301], + [-0.0416666679084301, -0.0416666679084301, -0.0416666679084301, 0.0416666679084301], + [0.0416666679084301, -0.0416666679084301, -0.0416666679084301, 0.0416666679084301] + ] + ] + }, + { + "test_name": "bce_1d", + "loss_type": "BCELoss", + "input": [0.9015220999717712, 0.5855253338813782, 0.4241449534893036, 0.46762359142303467, 0.14062806963920593], + "target": [0.258492648601532, 0.16423600912094116, 0.6211971044540405, 
0.637805163860321, 0.7739548683166504], + "expected_output": 1.1154000759124756, + "expected_grad_input": [1.4485907554626465, 0.34718963503837585, -0.16135546565055847, -0.1367185115814209, -1.0481046438217163] + }, + { + "test_name": "bce_2d", + "loss_type": "BCELoss", + "input": [ + [0.6885789632797241, 0.5632960200309753, 0.5165387392044067, 0.21918706595897675], + [0.6609245538711548, 0.26889950037002563, 0.4938890039920807, 0.5620960593223572], + [0.1478312611579895, 0.46731826663017273, 0.8081178069114685, 0.46147868037223816] + ], + "target": [ + [0.5905491709709167, 0.7722692489624023, 0.9141846299171448, 0.04094696044921875], + [0.8343076109886169, 0.14735394716262817, 0.6872336268424988, 0.9231226444244385], + [0.5070211887359619, 0.9549044966697693, 0.07397425174713135, 0.30902040004730225] + ], + "expected_output": 0.7156786918640137, + "expected_grad_input": [ + [0.0380956269800663, -0.07079222798347473, -0.132693812251091, 0.08678851276636124], + [-0.06447289884090424, 0.05152176693081856, -0.06445783376693726, -0.12222739309072495], + [-0.23760251700878143, -0.16322611272335052, 0.39453938603401184, 0.05112287402153015] + ] + }, + { + "test_name": "ce_3x5", + "loss_type": "CrossEntropyLoss", + "input": [ + [-0.2853364050388336, -0.315053254365921, 0.5482434034347534, 0.03692331537604332, -0.8128055930137634], + [0.012067895382642746, -0.6397443413734436, 0.6415984630584717, -0.5963875651359558, 0.7354843616485596], + [0.0723138153553009, 1.8586984872817993, -0.3944927752017975, 0.7613471746444702, -1.7989355325698853] + ], + "target": [1, 3, 3], + "expected_output": 1.9497250318527222, + "expected_grad_input": [ + [0.05339653044939041, -0.2815002501010895, 0.12289440631866455, 0.0737001895904541, 0.03150910511612892], + [0.055523306131362915, 0.028933227062225342, 0.10420221090316772, -0.3031180799007416, 0.11445929855108261], + [0.03422117233276367, 0.20422637462615967, 0.02145671844482422, -0.26517197489738464, 0.005267707630991936] + ] + }, + { + 
"test_name": "ce_4x3", + "loss_type": "CrossEntropyLoss", + "input": [ + [-0.9317735433578491, 0.8195555806159973, -1.0077593326568604], + [1.0381484031677246, -0.05357737094163895, 0.184811532497406], + [-0.7205739617347717, 0.025681406259536743, -0.34994596242904663], + [-0.0623726025223732, -2.177246332168579, 0.2064162939786911] + ], + "target": [0, 2, 1, 2], + "expected_output": 1.2121622562408447, + "expected_grad_input": [ + [-0.21748638153076172, 0.187351793050766, 0.03013458102941513], + [0.14191420376300812, 0.04763162136077881, -0.18954582512378693], + [0.05485185235738754, -0.13431265950202942, 0.07946081459522247], + [0.10292184352874756, 0.012417369522154331, -0.11533921957015991] + ] + }, + { + "test_name": "ce_2x10", + "loss_type": "CrossEntropyLoss", + "input": [ + [-1.6916636228561401, -1.3838845491409302, 0.4868715703487396, -1.0020296573638916, -0.0007737990817986429, 0.1620604395866394, 1.195958137512207, -1.3061535358428955, -1.4039719104766846, 0.09526508301496506], + [-0.3658939301967621, 0.19605061411857605, -0.7174144983291626, 2.833967924118042, 1.9534740447998047, 2.0486814975738525, -1.0880382061004639, -2.0478978157043457, 1.0005890130996704, 0.6863341927528381] + ], + "target": [9, 8], + "expected_output": 2.4159698486328125, + "expected_grad_input": [ + [0.009667445905506611, 0.013151600956916809, 0.08539602160453796, 0.019267085939645767, 0.052439190447330475, 0.06171261891722679, 0.17353616654872894, 0.014214667491614819, 0.012890053912997246, -0.4422748386859894], + [0.008803067728877068, 0.015441288240253925, 0.006193991284817457, 0.2159317135810852, 0.08952055871486664, 0.09846250712871552, 0.004275735467672348, 0.001637378940358758, -0.46547845005989075, 0.025212211534380913] + ] + }, + { + "test_name": "nll_3x5", + "loss_type": "NLLLoss", + "input": [ + [-2.538830041885376, -1.288444995880127, -1.0781428813934326, -2.4460291862487793, -1.5210682153701782], + [-2.8672585487365723, -3.1622445583343506, -3.840073585510254, 
-0.33780384063720703, -1.7958425283432007], + [-1.3330196142196655, -4.022608280181885, -2.4171359539031982, -1.4570512771606445, -0.925545334815979] + ], + "target": [4, 4, 2], + "expected_output": 1.911348819732666, + "expected_grad_input": [ + [0.0, 0.0, 0.0, 0.0, -0.3333333432674408], + [0.0, 0.0, 0.0, 0.0, -0.3333333432674408], + [0.0, 0.0, -0.3333333432674408, 0.0, 0.0] + ] + }, + { + "test_name": "nll_4x3", + "loss_type": "NLLLoss", + "input": [ + [-0.9523531198501587, -1.8531824350357056, -0.7821307182312012], + [-0.5172839760780334, -2.267467737197876, -1.2030119895935059], + [-1.0592443943023682, -2.237569808959961, -0.6041024327278137], + [-1.309128999710083, -0.5687561631202698, -1.8096251487731934] + ], + "target": [1, 2, 1, 1], + "expected_output": 1.465630054473877, + "expected_grad_input": [ + [0.0, -0.25, 0.0], + [0.0, 0.0, -0.25], + [0.0, -0.25, 0.0], + [0.0, -0.25, 0.0] + ] + }, + { + "test_name": "nll_2x10", + "loss_type": "NLLLoss", + "input": [ + [-3.4082841873168945, -3.5746684074401855, -2.9533114433288574, -1.1421728134155273, -2.9593586921691895, -2.75557804107666, -2.7994468212127686, -2.795651435852051, -2.551769256591797, -1.3771406412124634], + [-4.477285385131836, -1.5400781631469727, -2.1832103729248047, -2.6879796981811523, -1.7384589910507202, -2.2898473739624023, -3.020587205886841, -1.8060259819030762, -3.24482798576355, -2.74190616607666] + ], + "target": [1, 0], + "expected_output": 4.02597713470459, + "expected_grad_input": [ + [0.0, -0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ] + } + ], + "activations": [ + { + "test_name": "relu_1d", + "activation_type": "ReLU", + "input": [-0.212321937084198, -0.3378549814224243, -0.6185609698295593, -2.283259391784668, -0.8088300824165344], + "expected_output": [0.0, 0.0, 0.0, 0.0, 0.0], + "expected_grad_input": [0.0, 0.0, 0.0, 0.0, 0.0] + }, + { + "test_name": "relu_2d", + "activation_type": "ReLU", + "input": [ + 
[1.5088868141174316, 0.013797433115541935, 0.49824267625808716, -1.8627145290374756], + [0.1571427881717682, 0.6885417103767395, -0.2572212517261505, 0.07220080494880676], + [-0.9438130855560303, -0.2745254337787628, -1.0741472244262695, 0.6364355087280273] + ], + "expected_output": [ + [1.5088868141174316, 0.013797433115541935, 0.49824267625808716, 0.0], + [0.1571427881717682, 0.6885417103767395, 0.0, 0.07220080494880676], + [0.0, 0.0, 0.0, 0.6364355087280273] + ], + "expected_grad_input": [ + [1.0, 1.0, 1.0, 0.0], + [1.0, 1.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 1.0] + ] + }, + { + "test_name": "relu_3d", + "activation_type": "ReLU", + "input": [ + [ + [-0.8326117396354675, -0.8129478096961975, 0.9700341820716858, -0.6757722496986389], + [0.20425143837928772, -0.02647615596652031, -0.4137862026691437, 0.5184088349342346], + [1.9138009548187256, 0.3378446102142334, 0.12505611777305603, -0.7621514797210693] + ], + [ + [-1.190559983253479, 0.7756073474884033, -0.9437392950057983, 0.17304909229278564], + [-1.3610970973968506, 1.8018341064453125, -0.07434194535017014, -0.15664155781269073], + [-0.8708454966545105, -0.6410972476005554, 0.5373784899711609, 0.7817491888999939] + ] + ], + "expected_output": [ + [ + [0.0, 0.0, 0.9700341820716858, 0.0], + [0.20425143837928772, 0.0, 0.0, 0.5184088349342346], + [1.9138009548187256, 0.3378446102142334, 0.12505611777305603, 0.0] + ], + [ + [0.0, 0.7756073474884033, 0.0, 0.17304909229278564], + [0.0, 1.8018341064453125, 0.0, 0.0], + [0.0, 0.0, 0.5373784899711609, 0.7817491888999939] + ] + ], + "expected_grad_input": [ + [ + [0.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 1.0], + [1.0, 1.0, 1.0, 0.0] + ], + [ + [0.0, 1.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "sigmoid_1d", + "activation_type": "Sigmoid", + "input": [-0.15397712588310242, 0.20357346534729004, 1.035062551498413, 0.4778358042240143, -0.14360927045345306], + "expected_output": [0.4615815579891205, 0.550718367099762, 
0.737896203994751, 0.6172367334365845, 0.4641593098640442], + "expected_grad_input": [0.248524010181427, 0.24742764234542847, 0.1934053897857666, 0.23625554144382477, 0.24871544539928436] + }, + { + "test_name": "sigmoid_2d", + "activation_type": "Sigmoid", + "input": [ + [-0.6778466701507568, -2.4395816326141357, 1.4930769205093384, 0.4015757441520691], + [0.25948184728622437, -1.2378422021865845, 1.4154608249664307, -0.764299750328064], + [-1.3940775394439697, -0.5358389616012573, -0.09588626027107239, -0.10300292819738388] + ], + "expected_output": [ + [0.3367420732975006, 0.08020377159118652, 0.8165396451950073, 0.5990661978721619], + [0.5645089149475098, 0.22481180727481842, 0.8046258091926575, 0.3177134692668915], + [0.1987575888633728, 0.3691560626029968, 0.4760468006134033, 0.4742719829082489] + ], + "expected_grad_input": [ + [0.22334685921669006, 0.07377112656831741, 0.14980265498161316, 0.24018588662147522], + [0.24583859741687775, 0.17427146434783936, 0.15720312297344208, 0.21677161753177643], + [0.15925301611423492, 0.2328798621892929, 0.2494262456893921, 0.2493380606174469] + ] + }, + { + "test_name": "sigmoid_3d", + "activation_type": "Sigmoid", + "input": [ + [ + [-0.09658124297857285, 1.541925072669983, -1.5147335529327393, 1.8517848253250122], + [-0.17016290128231049, -1.3860632181167603, 1.6512693166732788, -0.5156721472740173], + [-0.38542383909225464, 0.8100648522377014, 0.9594927430152893, 0.10350999981164932] + ], + [ + [0.8290349841117859, 2.0921294689178467, 0.7953095436096191, 0.2792847752571106], + [-0.7917478680610657, 0.47021403908729553, 0.09063850343227386, 1.7422553300857544], + [-1.2660012245178223, 0.38916081190109253, 0.34287506341934204, -1.4590637683868408] + ] + ], + "expected_output": [ + [ + [0.4758734703063965, 0.8237444162368774, 0.18023833632469177, 0.8643364906311035], + [0.4575616121292114, 0.20003697276115417, 0.839062511920929, 0.3738647997379303], + [0.40481942892074585, 0.692123293876648, 0.7230202555656433, 
0.5258544087409973] + ], + [ + [0.6961508393287659, 0.8901358246803284, 0.6889702081680298, 0.5693708658218384], + [0.3117934763431549, 0.6154344081878662, 0.5226441025733948, 0.8509733080863953], + [0.21994255483150482, 0.5960806608200073, 0.584888756275177, 0.18861056864261627] + ] + ], + "expected_grad_input": [ + [ + [0.24941791594028473, 0.1451895534992218, 0.14775247871875763, 0.11725892126560211], + [0.2481989860534668, 0.16002216935157776, 0.1350366175174713, 0.2340899258852005], + [0.240940660238266, 0.21308864653110504, 0.20026196539402008, 0.2493315488100052] + ], + [ + [0.2115248441696167, 0.09779404103755951, 0.21429026126861572, 0.2451876848936081], + [0.214578315615654, 0.23667490482330322, 0.2494872510433197, 0.1268177330493927], + [0.17156782746315002, 0.24076850712299347, 0.24279390275478363, 0.15303662419319153] + ] + ] + }, + { + "test_name": "leaky_relu_default", + "activation_type": "LeakyReLU", + "input": [1.489150047302246, 1.5290449857711792, 2.3474972248077393, 0.9470279216766357, 0.3205665051937103], + "expected_output": [1.489150047302246, 1.5290449857711792, 2.3474972248077393, 0.9470279216766357, 0.3205665051937103], + "expected_grad_input": [1.0, 1.0, 1.0, 1.0, 1.0] + }, + { + "test_name": "leaky_relu_slope_0_2", + "activation_type": "LeakyReLU", + "input": [ + [-0.4337339401245117, 0.9478998780250549, 0.5268871784210205, -1.6986902952194214], + [-0.06468385457992554, 0.849978506565094, -0.1342061311006546, -0.21423566341400146], + [0.00400538882240653, 0.055701903998851776, 0.3431284427642822, -0.1950915902853012] + ], + "expected_output": [ + [-0.08674678951501846, 0.9478998780250549, 0.5268871784210205, -0.33973807096481323], + [-0.012936770915985107, 0.849978506565094, -0.02684122696518898, -0.04284713417291641], + [0.00400538882240653, 0.055701903998851776, 0.3431284427642822, -0.03901831805706024] + ], + "expected_grad_input": [ + [0.20000000298023224, 1.0, 1.0, 0.20000000298023224], + [0.20000000298023224, 1.0, 
0.20000000298023224, 0.20000000298023224], + [1.0, 1.0, 1.0, 0.20000000298023224] + ], + "kwargs": { + "negative_slope": 0.2 + } + }, + { + "test_name": "leaky_relu_3d", + "activation_type": "LeakyReLU", + "input": [ + [ + [-1.2593692541122437, -0.2130700796842575, 0.34436315298080444, -3.1016058921813965], + [-1.4587225914001465, -1.4318257570266724, -0.6071268916130066, -0.25973787903785706], + [0.4159761369228363, 1.9025356769561768, -1.3524627685546875, -0.13028714060783386] + ], + [ + [1.7551294565200806, 0.06751081347465515, -0.39783358573913574, 0.758330225944519], + [0.6833979487419128, -0.13825182616710663, 0.9863895177841187, -0.3892551064491272], + [0.6138074398040771, -0.2786312699317932, 0.5885060429573059, 0.7091160416603088] + ] + ], + "expected_output": [ + [ + [-0.12593692541122437, -0.02130700834095478, 0.34436315298080444, -0.3101606070995331], + [-0.14587226510047913, -0.14318257570266724, -0.06071269139647484, -0.025973787531256676], + [0.4159761369228363, 1.9025356769561768, -0.13524627685546875, -0.013028713874518871] + ], + [ + [1.7551294565200806, 0.06751081347465515, -0.039783358573913574, 0.758330225944519], + [0.6833979487419128, -0.013825182802975178, 0.9863895177841187, -0.03892550989985466], + [0.6138074398040771, -0.02786312811076641, 0.5885060429573059, 0.7091160416603088] + ] + ], + "expected_grad_input": [ + [ + [0.10000000149011612, 0.10000000149011612, 1.0, 0.10000000149011612], + [0.10000000149011612, 0.10000000149011612, 0.10000000149011612, 0.10000000149011612], + [1.0, 1.0, 0.10000000149011612, 0.10000000149011612] + ], + [ + [1.0, 1.0, 0.10000000149011612, 1.0], + [1.0, 0.10000000149011612, 1.0, 0.10000000149011612], + [1.0, 0.10000000149011612, 1.0, 1.0] + ] + ], + "kwargs": { + "negative_slope": 0.1 + } + } + ], + "cat": [ + { + "test_name": "cat_dim0_3_4", + "inputs": [ + [-0.9581949710845947, -0.5657975077629089, 0.4133423864841461], + [0.0831180214881897, 0.23808503150939941, 0.6678999662399292, 2.3862266540527344] + 
], + "dim": 0, + "expected_output": [-0.9581949710845947, -0.5657975077629089, 0.4133423864841461, 0.0831180214881897, 0.23808503150939941, 0.6678999662399292, 2.3862266540527344], + "expected_grads": [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + }, + { + "test_name": "cat_dim0_3_4_2", + "inputs": [ + [-0.6914089322090149, -0.2784762978553772, -1.9055553674697876], + [-0.05876833572983742, 0.042250145226716995, -1.0124346017837524, 0.5864773392677307], + [0.6910783052444458, 0.5360009074211121] + ], + "dim": 0, + "expected_output": [-0.6914089322090149, -0.2784762978553772, -1.9055553674697876, -0.05876833572983742, 0.042250145226716995, -1.0124346017837524, 0.5864773392677307, 0.6910783052444458, 0.5360009074211121], + "expected_grads": [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0] + ] + }, + { + "test_name": "cat_dim0_2x3_4x3", + "inputs": [ + [ + [0.3768846392631531, -0.43834608793258667, 1.1436107158660889], + [1.3469657897949219, -1.848899483680725, -0.9097157120704651] + ], + [ + [-0.7036322951316833, 0.3036259412765503, 1.4873687028884888], + [0.17053554952144623, 0.03948281332850456, -1.937150478363037], + [-0.43305763602256775, -0.21643993258476257, 0.6168283224105835], + [0.4436769485473633, -0.16708824038505554, -0.09850939363241196] + ] + ], + "dim": 0, + "expected_output": [ + [0.3768846392631531, -0.43834608793258667, 1.1436107158660889], + [1.3469657897949219, -1.848899483680725, -0.9097157120704651], + [-0.7036322951316833, 0.3036259412765503, 1.4873687028884888], + [0.17053554952144623, 0.03948281332850456, -1.937150478363037], + [-0.43305763602256775, -0.21643993258476257, 0.6168283224105835], + [0.4436769485473633, -0.16708824038505554, -0.09850939363241196] + ], + "expected_grads": [ + [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "cat_dim1_2x3_2x4", + "inputs": [ + [ + [0.4775729477405548, 0.8609507083892822, 
-1.3283056020736694], + [-0.9946742653846741, -0.769915759563446, -0.9055580496788025] + ], + [ + [1.228192925453186, 0.6268085241317749, 0.8738096952438354, -0.19481602311134338], + [1.1477203369140625, -0.644938051700592, 1.34561026096344, 0.0802030935883522] + ] + ], + "dim": 1, + "expected_output": [ + [0.4775729477405548, 0.8609507083892822, -1.3283056020736694, 1.228192925453186, 0.6268085241317749, 0.8738096952438354, -0.19481602311134338], + [-0.9946742653846741, -0.769915759563446, -0.9055580496788025, 1.1477203369140625, -0.644938051700592, 1.34561026096344, 0.0802030935883522] + ], + "expected_grads": [ + [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ] + }, + { + "test_name": "cat_dim0_2x3x4_5x3x4", + "inputs": [ + [ + [ + [1.875388741493225, 1.1042989492416382, 1.1282050609588623, -0.17131087183952332], + [0.23035314679145813, -0.36578282713890076, -0.9600357413291931, -0.26212170720100403], + [-0.5897102952003479, -0.14395228028297424, 0.05182264745235443, -0.3284812867641449] + ], + [ + [-2.247206211090088, -0.4478967487812042, 0.4234687089920044, -0.3874586224555969], + [0.7004079222679138, 1.7929826974868774, 0.8702965974807739, -1.0552809238433838], + [-1.3284013271331787, 0.7060741186141968, 0.35730111598968506, 0.5892837643623352] + ] + ], + [ + [ + [0.26021307706832886, -0.2455366551876068, 0.24650610983371735, 0.1328691989183426], + [0.12191462516784668, 0.47808775305747986, 0.2761341631412506, -0.5895728468894958], + [0.7838366031646729, 0.8732404708862305, -0.19896702468395233, -1.3615714311599731] + ], + [ + [-0.5193602442741394, 0.07648162543773651, 0.34005025029182434, 1.4557304382324219], + [-1.2438069581985474, -1.0805374383926392, -0.447700172662735, -0.7288169264793396], + [-0.16066236793994904, -0.32063713669776917, -0.6307737827301025, -0.788766622543335] + ], + [ + [-1.8269540071487427, 0.659221351146698, -0.26273947954177856, 0.9314952492713928], + [-0.4593467116355896, 
-0.9419456720352173, -0.7089186310768127, 2.1860759258270264], + [0.05665723606944084, 0.9532074928283691, 0.8520749807357788, -1.6946725845336914] + ], + [ + [1.1805996894836426, -2.8929238319396973, -0.3875778615474701, -0.7124031782150269], + [-2.284684181213379, -0.45604968070983887, 0.051366694271564484, 0.6950237154960632], + [-0.5081791877746582, 1.1477830410003662, 0.24005727469921112, -0.7906641364097595] + ], + [ + [-0.8132007122039795, 0.1414821743965149, 0.4927920699119568, -0.5128253102302551], + [-0.5407329201698303, 0.7841619849205017, 0.6991406679153442, -0.036269914358854294], + [-0.37975940108299255, -0.8534544110298157, 0.6674330234527588, -0.8035953640937805] + ] + ] + ], + "dim": 0, + "expected_output": [ + [ + [1.875388741493225, 1.1042989492416382, 1.1282050609588623, -0.17131087183952332], + [0.23035314679145813, -0.36578282713890076, -0.9600357413291931, -0.26212170720100403], + [-0.5897102952003479, -0.14395228028297424, 0.05182264745235443, -0.3284812867641449] + ], + [ + [-2.247206211090088, -0.4478967487812042, 0.4234687089920044, -0.3874586224555969], + [0.7004079222679138, 1.7929826974868774, 0.8702965974807739, -1.0552809238433838], + [-1.3284013271331787, 0.7060741186141968, 0.35730111598968506, 0.5892837643623352] + ], + [ + [0.26021307706832886, -0.2455366551876068, 0.24650610983371735, 0.1328691989183426], + [0.12191462516784668, 0.47808775305747986, 0.2761341631412506, -0.5895728468894958], + [0.7838366031646729, 0.8732404708862305, -0.19896702468395233, -1.3615714311599731] + ], + [ + [-0.5193602442741394, 0.07648162543773651, 0.34005025029182434, 1.4557304382324219], + [-1.2438069581985474, -1.0805374383926392, -0.447700172662735, -0.7288169264793396], + [-0.16066236793994904, -0.32063713669776917, -0.6307737827301025, -0.788766622543335] + ], + [ + [-1.8269540071487427, 0.659221351146698, -0.26273947954177856, 0.9314952492713928], + [-0.4593467116355896, -0.9419456720352173, -0.7089186310768127, 2.1860759258270264], + 
[0.05665723606944084, 0.9532074928283691, 0.8520749807357788, -1.6946725845336914] + ], + [ + [1.1805996894836426, -2.8929238319396973, -0.3875778615474701, -0.7124031782150269], + [-2.284684181213379, -0.45604968070983887, 0.051366694271564484, 0.6950237154960632], + [-0.5081791877746582, 1.1477830410003662, 0.24005727469921112, -0.7906641364097595] + ], + [ + [-0.8132007122039795, 0.1414821743965149, 0.4927920699119568, -0.5128253102302551], + [-0.5407329201698303, 0.7841619849205017, 0.6991406679153442, -0.036269914358854294], + [-0.37975940108299255, -0.8534544110298157, 0.6674330234527588, -0.8035953640937805] + ] + ], + "expected_grads": [ + [ + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ], + [ + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ] + ] + }, + { + "test_name": "cat_dim1_2x3x4_2x1x4", + "inputs": [ + [ + [ + [-0.41035687923431396, 0.46809425950050354, -0.2346276491880417, 0.27824410796165466], + [-0.00012936530401930213, -0.12449756264686584, -1.2247875928878784, 0.962885856628418], + [1.1861584186553955, -1.2203160524368286, 0.2909986078739166, -0.07964225858449936] + ], + [ + [1.3200364112854004, -1.5196866989135742, -1.253058671951294, -0.20159688591957092], + [-0.10875027626752853, 0.608341634273529, 0.7894347310066223, 0.7824702858924866], + [-0.06465863436460495, -0.0002302070497535169, 0.569308340549469, 0.7476243376731873] + ] + ], + [ + [ + [1.7699416875839233, -0.5576956272125244, -0.4302156865596771, 0.23022237420082092] + ], + [ + [-2.061005115509033, -0.31494468450546265, 
0.29385510087013245, -0.275235116481781] + ] + ] + ], + "dim": 1, + "expected_output": [ + [ + [-0.41035687923431396, 0.46809425950050354, -0.2346276491880417, 0.27824410796165466], + [-0.00012936530401930213, -0.12449756264686584, -1.2247875928878784, 0.962885856628418], + [1.1861584186553955, -1.2203160524368286, 0.2909986078739166, -0.07964225858449936], + [1.7699416875839233, -0.5576956272125244, -0.4302156865596771, 0.23022237420082092] + ], + [ + [1.3200364112854004, -1.5196866989135742, -1.253058671951294, -0.20159688591957092], + [-0.10875027626752853, 0.608341634273529, 0.7894347310066223, 0.7824702858924866], + [-0.06465863436460495, -0.0002302070497535169, 0.569308340549469, 0.7476243376731873], + [-2.061005115509033, -0.31494468450546265, 0.29385510087013245, -0.275235116481781] + ] + ], + "expected_grads": [ + [ + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ], + [ + [ + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0] + ] + ] + ] + }, + { + "test_name": "cat_dim2_2x3x4_2x3x2", + "inputs": [ + [ + [ + [0.37119555473327637, 1.5117958784103394, -0.8914596438407898, 0.5247467160224915], + [0.3517809212207794, 0.2491273283958435, 0.04235552251338959, 0.896659791469574], + [-1.2784851789474487, -1.269239068031311, -0.5266082286834717, -0.48970502614974976] + ], + [ + [-0.15184549987316132, 3.1874823570251465, -0.2933650016784668, 1.397810459136963], + [-0.9303572773933411, -1.3949681520462036, 0.2753496468067169, -1.7002266645431519], + [0.5674450993537903, 0.5813174247741699, 0.7705280780792236, -1.1304327249526978] + ] + ], + [ + [ + [1.1466082334518433, -0.39723342657089233], + [-1.9603017568588257, 1.5411626100540161], + [0.48273351788520813, 0.6697028875350952] + ], + [ + [0.01900755986571312, -0.0792299285531044], + [0.505814254283905, 0.7093597054481506], + [1.5069438219070435, -0.5037926435470581] + ] + ] + ], + "dim": 2, + 
"expected_output": [ + [ + [0.37119555473327637, 1.5117958784103394, -0.8914596438407898, 0.5247467160224915, 1.1466082334518433, -0.39723342657089233], + [0.3517809212207794, 0.2491273283958435, 0.04235552251338959, 0.896659791469574, -1.9603017568588257, 1.5411626100540161], + [-1.2784851789474487, -1.269239068031311, -0.5266082286834717, -0.48970502614974976, 0.48273351788520813, 0.6697028875350952] + ], + [ + [-0.15184549987316132, 3.1874823570251465, -0.2933650016784668, 1.397810459136963, 0.01900755986571312, -0.0792299285531044], + [-0.9303572773933411, -1.3949681520462036, 0.2753496468067169, -1.7002266645431519, 0.505814254283905, 0.7093597054481506], + [0.5674450993537903, 0.5813174247741699, 0.7705280780792236, -1.1304327249526978, 1.5069438219070435, -0.5037926435470581] + ] + ], + "expected_grads": [ + [ + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ], + [ + [ + [1.0, 1.0], + [1.0, 1.0], + [1.0, 1.0] + ], + [ + [1.0, 1.0], + [1.0, 1.0], + [1.0, 1.0] + ] + ] + ] + }, + { + "test_name": "cat_dim-1_2x3x4_2x3x4", + "inputs": [ + [ + [ + [-0.015211731195449829, 0.5427215099334717, 0.12508316338062286, -0.8761705756187439], + [1.2222594022750854, 0.32681646943092346, -0.7020950317382812, -0.6732766032218933], + [-2.0726318359375, -2.1240861415863037, -0.42500147223472595, -2.2850937843322754] + ], + [ + [0.5908071398735046, 0.31265148520469666, 0.10613418370485306, -0.3067088723182678], + [0.29792049527168274, 0.8381712436676025, 0.2301393747329712, -0.5780777335166931], + [-0.02522311918437481, -1.2018290758132935, -0.5614521503448486, -0.9464563131332397] + ] + ], + [ + [ + [-2.0851516723632812, -0.8552408218383789, -1.0997719764709473, 0.6193552613258362], + [-0.18342575430870056, 0.3666188418865204, 1.01323664188385, 0.6346396803855896], + [0.8483667373657227, 2.0354838371276855, 0.3772087097167969, 0.48434850573539734] + ], + [ + 
[-0.030398759990930557, 1.0925219058990479, 1.4625015258789062, -1.8284146785736084], + [-0.22143854200839996, 2.274590015411377, -0.783242404460907, -0.26778313517570496], + [1.5684525966644287, -0.283514142036438, -1.465786099433899, -0.06292983889579773] + ] + ] + ], + "dim": -1, + "expected_output": [ + [ + [-0.015211731195449829, 0.5427215099334717, 0.12508316338062286, -0.8761705756187439, -2.0851516723632812, -0.8552408218383789, -1.0997719764709473, 0.6193552613258362], + [1.2222594022750854, 0.32681646943092346, -0.7020950317382812, -0.6732766032218933, -0.18342575430870056, 0.3666188418865204, 1.01323664188385, 0.6346396803855896], + [-2.0726318359375, -2.1240861415863037, -0.42500147223472595, -2.2850937843322754, 0.8483667373657227, 2.0354838371276855, 0.3772087097167969, 0.48434850573539734] + ], + [ + [0.5908071398735046, 0.31265148520469666, 0.10613418370485306, -0.3067088723182678, -0.030398759990930557, 1.0925219058990479, 1.4625015258789062, -1.8284146785736084], + [0.29792049527168274, 0.8381712436676025, 0.2301393747329712, -0.5780777335166931, -0.22143854200839996, 2.274590015411377, -0.783242404460907, -0.26778313517570496], + [-0.02522311918437481, -1.2018290758132935, -0.5614521503448486, -0.9464563131332397, 1.5684525966644287, -0.283514142036438, -1.465786099433899, -0.06292983889579773] + ] + ], + "expected_grads": [ + [ + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ], + [ + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 1.0] + ] + ] + ] + }, + { + "test_name": "cat_dim0_2x3_2x3_2x3", + "inputs": [ + [ + [1.078231692314148, -0.5189271569252014, 0.8384542465209961], + [2.2189595699310303, -0.4039728343486786, 0.1526586413383484] + ], + [ + [0.4045778512954712, -0.14083661139011383, 0.4409853518009186], + [0.12938851118087769, 
-1.9698935747146606, 0.8400644063949585] + ], + [ + [-0.026187019422650337, -1.6108719110488892, -0.20987890660762787], + [0.6057907938957214, -0.7759561538696289, -0.17414551973342896] + ] + ], + "dim": 0, + "expected_output": [ + [1.078231692314148, -0.5189271569252014, 0.8384542465209961], + [2.2189595699310303, -0.4039728343486786, 0.1526586413383484], + [0.4045778512954712, -0.14083661139011383, 0.4409853518009186], + [0.12938851118087769, -1.9698935747146606, 0.8400644063949585], + [-0.026187019422650337, -1.6108719110488892, -0.20987890660762787], + [0.6057907938957214, -0.7759561538696289, -0.17414551973342896] + ], + "expected_grads": [ + [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ], + [ + [1.0, 1.0, 1.0], + [1.0, 1.0, 1.0] + ] + ] + } + ], + "softmax": [ + { + "test_name": "softmax_1d_dim0", + "dim": 0, + "input": [0.22354303300380707, -0.4713112711906433, 2.367802619934082, -0.13596417009830475, -1.654117226600647], + "expected_output": [0.09186249226331711, 0.0458529070019722, 0.7841125130653381, 0.06412187963724136, 0.014050145633518696], + "expected_grad": [5.475431219537086e-09, 2.7330462337715744e-09, 4.673674780519832e-08, 3.821961858108125e-09, 8.374539395283875e-10] + }, + { + "test_name": "softmax_2d_dim0", + "dim": 0, + "input": [ + [1.4844046831130981, 0.13370101153850555, -1.1498037576675415, -0.5602720975875854], + [1.704857587814331, -0.9016464948654175, -0.49271222949028015, -0.20682217180728912], + [-0.4253191351890564, -0.2249564677476883, -0.2063286155462265, -0.17618677020072937] + ], + "expected_output": [ + [0.41757795214653015, 0.48692208528518677, 0.1818821132183075, 0.25692132115364075], + [0.5205698609352112, 0.1729074865579605, 0.35088208317756653, 0.3658486604690552], + [0.06185217946767807, 0.34017038345336914, 0.4672357141971588, 0.37723004817962646] + ], + "expected_grad": [ + [2.4889585503728995e-08, 2.9022817926716016e-08, 2.1682037498749196e-08, 0.0], + [3.102838164181776e-08, 
1.0306089315292866e-08, 4.182840385169584e-08, 0.0], + [3.686677185754661e-09, 2.0275734868846484e-08, 5.569883754219518e-08, 0.0] + ] + }, + { + "test_name": "softmax_2d_dim1", + "dim": 1, + "input": [ + [-0.7785865664482117, 0.463659405708313, -1.4308651685714722, 0.22588877379894257], + [-0.6729050874710083, 0.05861504003405571, -0.14907807111740112, 1.9486465454101562], + [-0.07112552225589752, -0.15058763325214386, -0.2374127358198166, -0.13244783878326416] + ], + "expected_output": [ + [0.1296224296092987, 0.44893211126327515, 0.0675148218870163, 0.35393065214157104], + [0.05398470535874367, 0.1121930330991745, 0.09115192294120789, 0.7426703572273254], + [0.2694737911224365, 0.2488895058631897, 0.22819119691848755, 0.25344550609588623] + ], + "expected_grad": [ + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0] + ] + }, + { + "test_name": "softmax_3d_dim1", + "dim": 1, + "input": [ + [ + [-2.330476999282837, -0.7808834314346313, 0.8250064849853516, 1.2206652164459229], + [-0.06297583878040314, 1.1463638544082642, 0.5553734302520752, -0.5014337301254272], + [-0.44963574409484863, -0.40768420696258545, 0.9110543131828308, -0.6653339862823486] + ], + [ + [-1.299545168876648, -0.4152938425540924, -0.5165784358978271, -0.6881742477416992], + [2.848527193069458, -0.7424169778823853, 0.3712611794471741, 0.35424429178237915], + [0.0905831828713417, 1.3524107933044434, -1.1338030099868774, -0.14128278195858002] + ] + ], + "expected_output": [ + [ + [0.05809136480093002, 0.10726223886013031, 0.35044458508491516, 0.7516716718673706], + [0.5608862638473511, 0.7369529008865356, 0.26762038469314575, 0.13431672751903534], + [0.3810223937034607, 0.15578486025333405, 0.3819350004196167, 0.11401163041591644] + ], + [ + [0.014635459519922733, 0.13195419311523438, 0.25193271040916443, 0.17972871661186218], + [0.9265977740287781, 0.0951383113861084, 0.6121648550033569, 0.5097227692604065], + [0.05876670405268669, 0.7729074954986572, 0.13590246438980103, 
0.31054845452308655] + ] + ], + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0] + ], + [ + [8.723413658096035e-10, 0.0, 0.0, 2.1425332619173787e-08], + [5.522953117065299e-08, 0.0, 0.0, 6.076368919138986e-08], + [3.502768519680899e-09, 0.0, 0.0, 3.702026063479025e-08] + ] + ] + }, + { + "test_name": "softmax_3d_dim2", + "dim": 2, + "input": [ + [ + [1.930862545967102, -0.746330976486206, -0.011143443174660206, 1.8151899576187134], + [0.6066091060638428, 0.5963938236236572, 0.5361921191215515, 1.2350000143051147], + [1.9413354396820068, -1.163609266281128, -1.5966553688049316, 0.08320564776659012] + ], + [ + [-0.9222075343132019, -0.3711417317390442, 1.8695621490478516, -0.6497343182563782], + [0.7250988483428955, -1.3895257711410522, 1.1874427795410156, 0.027558235451579094], + [2.0010547637939453, -0.12460697442293167, -1.5580800771713257, 0.939643383026123] + ] + ], + "expected_output": [ + [ + [0.47552505135536194, 0.03269512206315994, 0.06819789111614227, 0.42358192801475525], + [0.20848824083805084, 0.2063693255186081, 0.19431209564208984, 0.3908303380012512], + [0.8130989074707031, 0.03644878417253494, 0.023638151586055756, 0.12681420147418976] + ], + [ + [0.049120329320430756, 0.08522876352071762, 0.8011456727981567, 0.06450523436069489], + [0.3118881285190582, 0.037638213485479355, 0.49521324038505554, 0.15526042878627777], + [0.669440507888794, 0.07990036904811859, 0.019054576754570007, 0.2316044420003891] + ] + ], + "expected_grad": [ + [ + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [3.990176367096865e-08, 4.762433114535725e-09, 1.1357412788015608e-09, 1.3804700493835753e-08] + ] + ] + } + ], + "clamp": [ + { + "test_name": "clamp_1d", + "min": -0.5, + "max": 0.5, + "input": [0.602070689201355, -0.382646381855011, 0.37847667932510376, -1.1653333902359009, 1.1664279699325562], + "expected_output": [0.5, -0.382646381855011, 
0.37847667932510376, -0.5, 0.5], + "expected_grad": [0.0, 1.0, 1.0, 0.0, 0.0] + }, + { + "test_name": "clamp_2d", + "min": 0.0, + "max": 1.0, + "input": [ + [0.6326690316200256, 0.7139781713485718, -0.3457438051700592, 1.7379117012023926], + [-0.5848365426063538, 0.23770400881767273, -0.8485972881317139, -0.8261227607727051], + [-0.4898433983325958, -2.3711392879486084, 1.1906901597976685, -1.298050880432129] + ], + "expected_output": [ + [0.6326690316200256, 0.7139781713485718, 0.0, 1.0], + [0.0, 0.23770400881767273, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0] + ], + "expected_grad": [ + [1.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0] + ] + }, + { + "test_name": "clamp_3d", + "min": -1.0, + "max": 1.0, + "input": [ + [ + [1.0166347026824951, -0.794191300868988, 0.36742645502090454, 0.42930465936660767], + [2.0288591384887695, -0.7683921456336975, 0.309842973947525, -2.6218323707580566], + [0.9455360174179077, 0.03766324743628502, 1.6522549390792847, -0.4681110680103302] + ], + [ + [-1.3070950508117676, -0.2728694975376129, -0.34847185015678406, 0.07418181002140045], + [-0.7247031331062317, 0.37256067991256714, 0.560422420501709, -2.4270148277282715], + [-0.25895750522613525, 0.4478885233402252, 1.3401062488555908, 1.5108956098556519] + ] + ], + "expected_output": [ + [ + [1.0, -0.794191300868988, 0.36742645502090454, 0.42930465936660767], + [1.0, -0.7683921456336975, 0.309842973947525, -1.0], + [0.9455360174179077, 0.03766324743628502, 1.0, -0.4681110680103302] + ], + [ + [-1.0, -0.2728694975376129, -0.34847185015678406, 0.07418181002140045], + [-0.7247031331062317, 0.37256067991256714, 0.560422420501709, -1.0], + [-0.25895750522613525, 0.4478885233402252, 1.0, 1.0] + ] + ], + "expected_grad": [ + [ + [0.0, 1.0, 1.0, 1.0], + [0.0, 1.0, 1.0, 0.0], + [1.0, 1.0, 0.0, 1.0] + ], + [ + [0.0, 1.0, 1.0, 1.0], + [1.0, 1.0, 1.0, 0.0], + [1.0, 1.0, 0.0, 0.0] + ] + ] + }, + { + "test_name": "clamp_negative_range", + "min": -2.0, + "max": -0.5, + "input": 
[0.15676277875900269, 2.0731828212738037, -1.6112782955169678, -0.06791739165782928], + "expected_output": [-0.5, -0.5, -1.6112782955169678, -0.5], + "expected_grad": [0.0, 0.0, 1.0, 0.0] + } + ], + "maxpool": [ + { + "test_name": "maxpool_basic_2x2", + "kernel_size": 2, + "stride": 2, + "padding": 0, + "input": [ + [ + [ + [0.18275369703769684, 0.6246524453163147, -0.7939775586128235, -0.674835205078125], + [-0.3876877427101135, 0.44965043663978577, -1.0259219408035278, 1.6894441843032837], + [0.26085028052330017, 1.4177610874176025, 0.6738032102584839, 1.4665507078170776], + [-1.1077474355697632, -0.7443782091140747, 0.35882535576820374, -0.2917172610759735] + ] + ] + ], + "expected_output": [ + [ + [ + [0.6246524453163147, 1.6894441843032837], + [1.4177610874176025, 1.4665507078170776] + ] + ] + ], + "expected_grad": [ + [ + [ + [0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0] + ] + ] + ] + }, + { + "test_name": "maxpool_stride_1", + "kernel_size": 2, + "stride": 1, + "padding": 0, + "input": [ + [ + [ + [0.2806217670440674, 0.6909231543540955, 0.7183824777603149, -0.5719326138496399], + [-0.46663370728492737, 0.1017654687166214, -0.11941836029291153, 1.3993206024169922], + [-0.7805798053741455, -0.1343953162431717, -0.3609391152858734, 0.104627326130867], + [-0.325872004032135, 0.3189202845096588, -0.3206574320793152, 0.18195369839668274] + ], + [ + [-1.4932167530059814, 0.5237964987754822, 0.7530690431594849, -0.2219216674566269], + [0.5819101333618164, -1.9369032382965088, 0.7780604362487793, -1.4069199562072754], + [-0.6577823162078857, -1.2317392826080322, -1.2463988065719604, -1.4996418952941895], + [-0.5403968691825867, 1.2409895658493042, -1.4416067600250244, -1.2435057163238525] + ] + ] + ], + "expected_output": [ + [ + [ + [0.6909231543540955, 0.7183824777603149, 1.3993206024169922], + [0.1017654687166214, 0.1017654687166214, 1.3993206024169922], + [0.3189202845096588, 0.3189202845096588, 0.18195369839668274] 
+ ], + [ + [0.5819101333618164, 0.7780604362487793, 0.7780604362487793], + [0.5819101333618164, 0.7780604362487793, 0.7780604362487793], + [1.2409895658493042, 1.2409895658493042, -1.2435057163238525] + ] + ] + ], + "expected_grad": [ + [ + [ + [0.0, 1.0, 1.0, 0.0], + [0.0, 2.0, 0.0, 2.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 2.0, 0.0, 1.0] + ], + [ + [0.0, 0.0, 0.0, 0.0], + [2.0, 0.0, 4.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 2.0, 0.0, 1.0] + ] + ] + ] + }, + { + "test_name": "maxpool_padding_1", + "kernel_size": 3, + "stride": 1, + "padding": 1, + "input": [ + [ + [ + [1.3967915773391724, 0.9178156852722168, 0.5120382905006409, -0.8405776023864746], + [-1.0445209741592407, 0.5547724366188049, -1.1883097887039185, -0.5416104793548584], + [-1.1297553777694702, -2.800556182861328, 1.2796905040740967, 0.2199985533952713], + [0.3249095380306244, 1.319007396697998, -0.5861101150512695, 0.7809817790985107] + ] + ] + ], + "expected_output": [ + [ + [ + [1.3967915773391724, 1.3967915773391724, 0.9178156852722168, 0.5120382905006409], + [1.3967915773391724, 1.3967915773391724, 1.2796905040740967, 1.2796905040740967], + [1.319007396697998, 1.319007396697998, 1.319007396697998, 1.2796905040740967], + [1.319007396697998, 1.319007396697998, 1.319007396697998, 1.2796905040740967] + ] + ] + ], + "expected_grad": [ + [ + [ + [4.0, 1.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 4.0, 0.0], + [0.0, 6.0, 0.0, 0.0] + ] + ] + ] + }, + { + "test_name": "maxpool_multichannel", + "kernel_size": 2, + "stride": 2, + "padding": 0, + "input": [ + [ + [ + [-0.20516234636306763, -0.7811664342880249, 0.6872723698616028, 0.7835897207260132, -1.1108732223510742, -3.106328010559082], + [-0.5377766489982605, -0.4870067834854126, -0.7153372168540955, -0.467404842376709, 0.551419734954834, 2.654942035675049], + [1.0582451820373535, -0.1468161642551422, -1.318347454071045, -1.2894185781478882, 1.9681813716888428, -0.7403607368469238], + [-0.8665743470191956, -0.30639445781707764, -0.5359372496604919, 
-0.35750812292099, -1.4471426010131836, -1.6966946125030518], + [-0.8158796429634094, 1.1372771263122559, 0.21925963461399078, 0.4133651852607727, 0.0061524491757154465, -0.5672794580459595], + [0.10004822909832001, -0.2485211342573166, -1.2868070602416992, -1.3662828207015991, -0.04625223949551582, -0.6149584650993347] + ], + [ + [1.2366944551467896, -0.8143561482429504, -0.7984835505485535, 0.5182347893714905, -0.036672789603471756, 0.6718097925186157], + [0.9242297410964966, 0.26971569657325745, 0.628537118434906, -0.7066188454627991, -0.03767240047454834, -1.4856157302856445], + [-0.5659265518188477, 0.3840969204902649, -0.7815778851509094, -0.15094131231307983, 0.4199317395687103, 1.4059346914291382], + [1.3185068368911743, 2.849227712431457e-05, -1.0952261686325073, 1.1855055093765259, 1.1880545616149902, 2.055619716644287], + [0.6602651476860046, -1.1078379154205322, 0.45688942074775696, -0.6034095287322998, -0.3147018849849701, -0.41328999400138855], + [0.3946205973625183, 1.1304746866226196, 0.8258382678031921, 0.9458276629447937, -1.1293034553527832, -1.6514161825180054] + ], + [ + [-0.059470854699611664, -0.9928666353225708, 1.1634254455566406, 1.609459400177002, -0.29416555166244507, 1.0819309949874878], + [0.6690162420272827, 0.6591296792030334, -0.27264782786369324, 0.9804211258888245, -0.17533333599567413, -0.12276917695999146], + [0.7414069175720215, 0.35395190119743347, -1.4793612957000732, -0.7297910451889038, -2.9890658855438232, 0.18370741605758667], + [-0.4726978540420532, -0.958824634552002, -1.5123530626296997, 1.5067633390426636, 1.6462355852127075, -0.6936932802200317], + [0.21946865320205688, -0.439242959022522, -0.1391070932149887, -0.018692156299948692, 1.6560721397399902, 1.0661178827285767], + [-1.1016371250152588, 1.3902591466903687, 0.514220118522644, -0.15104348957538605, 0.13773603737354279, 1.2250828742980957] + ] + ], + [ + [ + [-0.7642602920532227, 0.9183834791183472, -0.5463470220565796, 0.5569576621055603, 0.1281542032957077, 
-0.19802889227867126], + [-1.478035569190979, -0.5910253524780273, 0.8357481956481934, -0.22924941778182983, 1.6714451313018799, -0.4941222369670868], + [-1.1415528059005737, 0.7821402549743652, 0.010817415080964565, 0.3816293179988861, -1.652677059173584, -0.38139888644218445], + [0.6544674038887024, -0.14682821929454803, 0.08301469683647156, 0.712009072303772, -0.9005926847457886, 0.8906894326210022], + [0.47655177116394043, -0.8396266102790833, -0.4006587862968445, -1.5424505472183228, -0.5745509266853333, -1.9059150218963623], + [-0.9665390253067017, 0.367727130651474, -0.5785751342773438, 1.2373405694961548, 0.5358533263206482, 0.5413376092910767] + ], + [ + [1.2400078773498535, -0.905766487121582, 0.768028736114502, 1.62221360206604, 0.08158037811517715, 0.20281589031219482], + [-0.31887876987457275, -0.4908296763896942, 1.5734566450119019, 1.8697383403778076, -1.0638948678970337, -0.2272576242685318], + [0.25006136298179626, 1.161847710609436, -0.13073162734508514, -1.4062258005142212, 0.8497498631477356, -0.8599120378494263], + [-0.6105663776397705, 1.0629346370697021, 1.2221823930740356, 0.771891176700592, -1.01389479637146, -0.5315045118331909], + [-0.6020243763923645, 0.3213997781276703, -0.060615699738264084, -1.170371651649475, -2.773613929748535, -0.02982438914477825], + [1.2784193754196167, -1.5633939504623413, 1.8777929544448853, 0.5223742127418518, 0.051756951957941055, 0.4260155260562897] + ], + [ + [0.9475129246711731, 0.4364280700683594, -1.022117257118225, 0.16795289516448975, 0.5066304802894592, 0.2779245674610138], + [1.351500153541565, -0.8949641585350037, -1.596110463142395, 0.673722505569458, -0.6978610754013062, 1.7838830947875977], + [0.2176828533411026, 1.1277714967727661, -1.5005147457122803, -0.2404831200838089, -0.4854878783226013, -0.06616523861885071], + [0.9090389609336853, 0.29768696427345276, 1.5183887481689453, -0.91494220495224, 2.251573324203491, -0.3680903911590576], + [-1.0800817012786865, 1.9881891012191772, 
0.1034913882613182, 0.11661393195390701, 0.04335293173789978, 0.580488383769989], + [0.6679447293281555, 0.6449635624885559, -1.5224734544754028, -2.6157381534576416, -0.5713837146759033, -0.6671661734580994] + ] + ] + ], + "expected_output": [ + [ + [ + [-0.20516234636306763, 0.7835897207260132, 2.654942035675049], + [1.0582451820373535, -0.35750812292099, 1.9681813716888428], + [1.1372771263122559, 0.4133651852607727, 0.0061524491757154465] + ], + [ + [1.2366944551467896, 0.628537118434906, 0.6718097925186157], + [1.3185068368911743, 1.1855055093765259, 2.055619716644287], + [1.1304746866226196, 0.9458276629447937, -0.3147018849849701] + ], + [ + [0.6690162420272827, 1.609459400177002, 1.0819309949874878], + [0.7414069175720215, 1.5067633390426636, 1.6462355852127075], + [1.3902591466903687, 0.514220118522644, 1.6560721397399902] + ] + ], + [ + [ + [0.9183834791183472, 0.8357481956481934, 1.6714451313018799], + [0.7821402549743652, 0.712009072303772, 0.8906894326210022], + [0.47655177116394043, 1.2373405694961548, 0.5413376092910767] + ], + [ + [1.2400078773498535, 1.8697383403778076, 0.20281589031219482], + [1.161847710609436, 1.2221823930740356, 0.8497498631477356], + [1.2784193754196167, 1.8777929544448853, 0.4260155260562897] + ], + [ + [1.351500153541565, 0.673722505569458, 1.7838830947875977], + [1.1277714967727661, 1.5183887481689453, 2.251573324203491], + [1.9881891012191772, 0.11661393195390701, 0.580488383769989] + ] + ] + ], + "expected_grad": [ + [ + [ + [1.0, 0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 1.0], + [1.0, 0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ], + [ + [1.0, 0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 1.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 0.0, 0.0] + ], + [ + [0.0, 0.0, 0.0, 1.0, 0.0, 1.0], + [1.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 
0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 1.0, 0.0, 0.0, 0.0] + ] + ], + [ + [ + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 1.0], + [1.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 1.0] + ], + [ + [1.0, 0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 1.0, 0.0, 0.0, 1.0] + ], + [ + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 1.0, 0.0, 1.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 1.0, 0.0], + [0.0, 1.0, 0.0, 1.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ] + ] + ] + }, + { + "test_name": "maxpool_3d_input", + "kernel_size": 2, + "stride": 2, + "padding": 0, + "input": [ + [ + [-0.020498022437095642, 0.48795756697654724, 0.37779226899147034, -1.057392954826355], + [2.5331664085388184, -1.1566962003707886, -1.0765177011489868, -0.5477657318115234], + [1.5677039623260498, 0.4278451204299927, -0.2732718884944916, 0.5940448641777039], + [0.9717037081718445, -0.40528324246406555, -0.2497664988040924, 0.22397132217884064] + ] + ], + "expected_output": [ + [ + [2.5331664085388184, 0.37779226899147034], + [1.5677039623260498, 0.5940448641777039] + ] + ], + "expected_grad": [ + [ + [0.0, 0.0, 1.0, 0.0], + [1.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 1.0], + [0.0, 0.0, 0.0, 0.0] + ] + ] + } + ] +}; diff --git a/test/torch-bridge.js b/test/torch-bridge.js new file mode 100644 index 00000000..4c6c82c4 --- /dev/null +++ b/test/torch-bridge.js @@ -0,0 +1,42 @@ +const _torch = window.torch; + +export const { + Tensor, + tensor, + no_grad, + enable_no_grad, + disable_no_grad, + is_grad_enabled, + TorchFunction, + AccumulateGrad, + nn, + optim, + eventBus, + events, + export_, + ExportedProgram, + add, + cat, + concat, + concatenate, + matmul, + relu, + mul, + randn, + allclose, + 
numel, + softmax, + clip, + clamp, + seed, + manual_seed, + randperm, + rand, + ones, + ones_like, + zeros, + zeros_like, + __right_index__, + __left_index__, + // etc... +} = window.torch; diff --git a/test/typed_tensor.test.ts b/test/typed_tensor.test.ts new file mode 100644 index 00000000..6b2683ae --- /dev/null +++ b/test/typed_tensor.test.ts @@ -0,0 +1,70 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; +import { Tensor, FloatTensor, LongTensor } from 'torch'; + +describe('FloatTensor', () => { + it('is an instance of Tensor', () => { + assert.instanceOf(new FloatTensor([1.5, 2.5]), Tensor); + }); + + it('preserves float values unchanged', () => { + const data = new FloatTensor([1.1, 2.9, -3.7]).toArray() as number[]; + assert.closeTo(data[0], 1.1, 1e-9); + assert.closeTo(data[1], 2.9, 1e-9); + assert.closeTo(data[2], -3.7, 1e-9); + }); + + it('works with nested (2D) data', () => { + assert.deepStrictEqual(new FloatTensor([[1.5, 2.5], [3.5, 4.5]]).shape, [2, 2]); + }); + + it('accepts requires_grad option', () => { + assert.isTrue(new FloatTensor([1.0, 2.0], { requires_grad: true }).requires_grad); + }); + + it('participates in autograd', () => { + const t = new FloatTensor([2.0, 3.0], { requires_grad: true }); + t.sum().backward(); + assert.deepStrictEqual((t.grad!.toArray() as number[]), [1, 1]); + }); + + it('also accessible as torch.FloatTensor', () => { + assert.instanceOf(new torch.FloatTensor([1.5, 2.5]), Tensor); + }); +}); + +describe('LongTensor', () => { + it('is an instance of Tensor', () => { + assert.instanceOf(new LongTensor([1, 2, 3]), Tensor); + }); + + it('truncates positive floats toward zero', () => { + assert.deepStrictEqual(new LongTensor([1.1, 1.9, 2.0]).toArray(), [1, 1, 2]); + }); + + it('truncates negative floats toward zero (not floor)', () => { + assert.deepStrictEqual(new LongTensor([-1.1, -1.9, -2.0]).toArray(), [-1, -1, -2]); + }); + + it('works with nested (2D) data', () => { + const t = new LongTensor([[1.7, 
2.3], [3.9, -4.1]]); + assert.deepStrictEqual(t.shape, [2, 2]); + assert.deepStrictEqual(t.toArray(), [[1, 2], [3, -4]]); + }); + + it('accepts integer data unchanged', () => { + assert.deepStrictEqual(new LongTensor([0, 1, 2, 3]).toArray(), [0, 1, 2, 3]); + }); + + it('accepts requires_grad option and truncates', () => { + const t = new LongTensor([1.5, 2.5], { requires_grad: true }); + assert.isTrue(t.requires_grad); + assert.deepStrictEqual(t.toArray(), [1, 2]); + }); + + it('also accessible as torch.LongTensor', () => { + const t = new torch.LongTensor([1.9, -1.9]); + assert.instanceOf(t, Tensor); + assert.deepStrictEqual(t.toArray(), [1, -1]); + }); +}); diff --git a/test/umd.html b/test/umd.html new file mode 100644 index 00000000..57dddfb6 --- /dev/null +++ b/test/umd.html @@ -0,0 +1,48 @@ + + + + + + Mocha Tests + + + + + +
+ + + + + + + + + + + + diff --git a/test/view.test.ts b/test/view.test.ts new file mode 100644 index 00000000..0671c3b3 --- /dev/null +++ b/test/view.test.ts @@ -0,0 +1,269 @@ +import { assert } from 'chai'; +import * as torch from 'torch'; +import { Tensor } from 'torch'; + +describe('Tensor views via index()', () => { + + // ─── shape and values ─────────────────────────────────────────────────────── + + describe('shape and values', () => { + it('2D → 1D: correct shape and values', () => { + const x = torch.tensor([[1, 2, 3], [4, 5, 6]]); + const y = x.index(0); + assert.deepStrictEqual(y.shape, [3]); + assert.deepStrictEqual(y.toArray(), [1, 2, 3]); + }); + + it('2D → 1D: second row', () => { + const x = torch.tensor([[1, 2, 3], [4, 5, 6]]); + const y = x.index(1); + assert.deepStrictEqual(y.shape, [3]); + assert.deepStrictEqual(y.toArray(), [4, 5, 6]); + }); + + it('3D → 2D: correct shape and values', () => { + const x = torch.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]); + const y = x.index(1); + assert.deepStrictEqual(y.shape, [2, 2]); + assert.deepStrictEqual(y.toArray(), [[5, 6], [7, 8]]); + }); + + it('1D → scalar: shape [] and correct value', () => { + const x = torch.tensor([10, 20, 30]); + const s = x.index(1); + assert.deepStrictEqual(s.shape, []); + assert.strictEqual(s.item(), 20); + }); + + it('negative index: last row', () => { + const x = torch.tensor([[1, 2], [3, 4], [5, 6]]); + const y = x.index(-1); + assert.deepStrictEqual(y.shape, [2]); + assert.deepStrictEqual(y.toArray(), [5, 6]); + }); + + it('negative index: second-to-last row', () => { + const x = torch.tensor([[1, 2], [3, 4], [5, 6]]); + const y = x.index(-2); + assert.deepStrictEqual(y.shape, [2]); + assert.deepStrictEqual(y.toArray(), [3, 4]); + }); + }); + + // ─── error cases ──────────────────────────────────────────────────────────── + + describe('error cases', () => { + it('throws on scalar tensor', () => { + const s = torch.tensor(5); + assert.throws(() => s.index(0), 
/scalar/i); + }); + + it('throws on out-of-bounds positive index', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + assert.throws(() => x.index(2), /out of bounds/i); + }); + + it('throws on out-of-bounds negative index', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + assert.throws(() => x.index(-3), /out of bounds/i); + }); + }); + + // ─── data sharing: parent → view ──────────────────────────────────────────── + + describe('data sharing: parent mutations visible in view', () => { + it('zero_() on parent zeros out the view', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(0); // view of row 0 + x.zero_(); + assert.deepStrictEqual(y.toArray(), [0, 0]); + }); + + it('data setter on parent is visible in view', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(1); // view of row 1 + x.data = [10, 20, 30, 40]; + assert.deepStrictEqual(y.toArray(), [30, 40]); + }); + }); + + // ─── data sharing: view → parent ──────────────────────────────────────────── + + describe('data sharing: view mutations visible in parent', () => { + it('zero_() on view zeros the corresponding row of parent', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(0); + y.zero_(); + assert.deepStrictEqual(x.toArray(), [[0, 0], [3, 4]]); + }); + + it('zero_() on second-row view leaves first row intact', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(1); + y.zero_(); + assert.deepStrictEqual(x.toArray(), [[1, 2], [0, 0]]); + }); + + it('data setter on view writes into parent storage', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(0); + y.data = [99, 88]; + assert.deepStrictEqual(x.toArray(), [[99, 88], [3, 4]]); + }); + + it('multiple views are all linked to the same storage', () => { + const x = torch.tensor([[1, 2], [3, 4], [5, 6]]); + const row0 = x.index(0); + const row2 = x.index(2); + row0.zero_(); + row2.data = [9, 9]; + assert.deepStrictEqual(x.toArray(), [[0, 0], 
[3, 4], [9, 9]]); + assert.deepStrictEqual(row0.toArray(), [0, 0]); + assert.deepStrictEqual(row2.toArray(), [9, 9]); + }); + + it('3D → 2D view: zero_() on view updates parent', () => { + const x = torch.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]); + const y = x.index(0); // [[1,2],[3,4]] + y.zero_(); + assert.deepStrictEqual(x.toArray(), [[[0, 0], [0, 0]], [[5, 6], [7, 8]]]); + }); + }); + + // ─── operations on views ───────────────────────────────────────────────────── + + describe('operations on views produce correct results', () => { + it('add on a view gives correct values', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(1); // [3, 4] + const z = y.add(torch.tensor([10, 10])); + assert.deepStrictEqual(z.toArray(), [13, 14]); + // original unchanged + assert.deepStrictEqual(x.toArray(), [[1, 2], [3, 4]]); + }); + + it('mul on a view gives correct values', () => { + const x = torch.tensor([[2, 3], [4, 5]]); + const y = x.index(0); // [2, 3] + const z = y.mul(torch.tensor([2, 3])); + assert.deepStrictEqual(z.toArray(), [4, 9]); + }); + + it('sum on a view gives correct scalar', () => { + const x = torch.tensor([[1, 2, 3], [4, 5, 6]]); + const y = x.index(1); // [4, 5, 6] + assert.strictEqual(y.sum().item(), 15); + }); + + it('matmul on a 1D view gives correct dot product', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(0); // [1, 2] + const z = x.index(1); // [3, 4] + const dot = y.matmul(z); // 1*3 + 2*4 = 11 + assert.strictEqual(dot.item(), 11); + }); + }); + + // ─── detach() on a view ─────────────────────────────────────────────────────── + + describe('detach() on a view creates an independent copy', () => { + it('detached tensor has the same values', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(0); + const d = y.detach(); + assert.deepStrictEqual(d.toArray(), [1, 2]); + }); + + it('mutating the detached tensor does not affect parent', () => { + const x = torch.tensor([[1, 2], 
[3, 4]]); + const y = x.index(0); + const d = y.detach(); + d.zero_(); + // x and y should be unchanged + assert.deepStrictEqual(x.toArray(), [[1, 2], [3, 4]]); + assert.deepStrictEqual(y.toArray(), [1, 2]); + }); + + it('mutating the parent does not affect the detached copy', () => { + const x = torch.tensor([[1, 2], [3, 4]]); + const y = x.index(0); + const d = y.detach(); + x.zero_(); + assert.deepStrictEqual(d.toArray(), [1, 2]); + }); + }); + + // ─── optimizer step propagates through shared storage ──────────────────────── + + describe('optimizer step: views see updated parameter values', () => { + it('SGD step: view of param reflects new values', () => { + // param shape [2, 2], create view of first row before the step + const param = new torch.nn.Parameter(torch.tensor([[1, 2], [3, 4]])); + const row0_before = param.index(0).toArray() as number[]; + assert.deepStrictEqual(row0_before, [1, 2]); + + // Simulate what the optimizer does: param.data = newParam.data + const newValues = [10, 20, 30, 40]; + param.data = newValues; + + // A view created AFTER the update sees the new values + const row1_after = param.index(1); + assert.deepStrictEqual(row1_after.toArray(), [30, 40]); + + // A view created BEFORE the update also sees the new values (shared storage) + const row0_view = param.index(0); + assert.deepStrictEqual(row0_view.toArray(), [10, 20]); + }); + + it('full SGD training step does not corrupt parameter shape', () => { + class Linear extends torch.nn.Module { + w: torch.nn.Parameter; + constructor() { + super(); + this.w = new torch.nn.Parameter(torch.tensor([[1.0, 0.0], [0.0, 1.0]])); + this.register('w', this.w); + } + forward(x: Tensor): Tensor { + return x.matmul(this.w); + } + } + + const model = new Linear(); + const optim = new torch.optim.SGD(model.parameters(), 0.1); + + const x = torch.tensor([[1.0, 2.0]]); + const y = model.forward(x); + const loss = y.sum(); + loss.backward(); + + optim.step(); + + // After optimizer step, param shape must 
be preserved + assert.deepStrictEqual(model.w.shape, [2, 2]); + assert.strictEqual(model.w.dataLength(), 4); + + // View of updated param must have correct shape and values + const row0 = model.w.index(0); + assert.deepStrictEqual(row0.shape, [2]); + assert.strictEqual(row0.dataLength(), 2); + }); + }); + + // ─── chained index ──────────────────────────────────────────────────────────── + + describe('chained index()', () => { + it('index into a view gives correct scalar', () => { + const x = torch.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]); + // x.index(1) → [[5,6],[7,8]], then .index(0) → [5,6], then .index(1) → 6 + const val = x.index(1).index(0).index(1); + assert.deepStrictEqual(val.shape, []); + assert.strictEqual(val.item(), 6); + }); + + it('zero_() on doubly-chained view updates root storage', () => { + const x = torch.tensor([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]); + x.index(0).index(1).zero_(); // zero out [3,4] + assert.deepStrictEqual(x.toArray(), [[[1, 2], [0, 0]], [[5, 6], [7, 8]]]); + }); + }); +}); diff --git a/tsconfig.build.json b/tsconfig.build.json new file mode 100644 index 00000000..23bf1dc0 --- /dev/null +++ b/tsconfig.build.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "build/types", + "declaration": true, + "emitDeclarationOnly": true, + "noEmit": false + }, + "include": ["src"] +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..27bd2a93 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ESNext", + "useDefineForClassFields": true, + "module": "ESNext", + "lib": ["ESNext", "DOM"], + "moduleResolution": "bundler", + "strict": false, + "noImplicitAny": false, + "resolveJsonModule": true, + "esModuleInterop": true, + "noEmit": true, + "skipLibCheck": true, + "types": ["node", "mocha"] + }, + "include": ["src", "test", "vite.config.browser.ts", "vite.config.node.ts"] +} diff --git a/vite.config.browser.ts b/vite.config.browser.ts 
new file mode 100644 index 00000000..9122ad90 --- /dev/null +++ b/vite.config.browser.ts @@ -0,0 +1,22 @@ +import { defineConfig } from 'vite'; +import path from 'path'; + +export default defineConfig({ + build: { + minify: false, + sourcemap: true, + lib: { + entry: path.resolve(__dirname, 'src/index.ts'), + name: 'torch', + fileName: (format) => `torch.browser.${format}.js`, + formats: ['es'] // umd -> see cdn build + }, + outDir: 'build/browser', + rollupOptions: { + treeshake: true, + }, + }, + esbuild: { + keepNames: true, + } +}); diff --git a/vite.config.cdn.ts b/vite.config.cdn.ts new file mode 100644 index 00000000..dbfcb741 --- /dev/null +++ b/vite.config.cdn.ts @@ -0,0 +1,20 @@ +import { defineConfig } from 'vite'; +import path from 'path'; + +export default defineConfig({ + build: { + minify: 'esbuild', + sourcemap: true, + lib: { + entry: path.resolve(__dirname, 'src/index.ts'), + name: 'torch', + fileName: () => `torch.min.js`, + formats: ['umd'] + }, + emptyOutDir: false, + outDir: 'build', + }, + esbuild: { + keepNames: true, + } +}); diff --git a/vite.config.node.ts b/vite.config.node.ts new file mode 100644 index 00000000..174682bc --- /dev/null +++ b/vite.config.node.ts @@ -0,0 +1,16 @@ +import { defineConfig } from 'vite'; +import path from 'path'; + +export default defineConfig({ + build: { + minify: false, // don't minify for npm package + sourcemap: true, + lib: { + entry: path.resolve(__dirname, 'src/index.ts'), + fileName: (format) => format === 'es' ? 'torch.node.es.mjs' : 'torch.node.cjs', + formats: ['es', 'cjs'] + }, + outDir: 'build/node', + target: 'node20', + }, +}); diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 00000000..f10f0aaa --- /dev/null +++ b/yarn.lock @@ -0,0 +1,5855 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! 
+ +__metadata: + version: 8 + cacheKey: 10c0 + +"@actions/core@npm:^1.11.1": + version: 1.11.1 + resolution: "@actions/core@npm:1.11.1" + dependencies: + "@actions/exec": "npm:^1.1.1" + "@actions/http-client": "npm:^2.0.1" + checksum: 10c0/9aa30b397d8d0dbc74e69fe46b23fb105cab989beb420c57eacbfc51c6804abe8da0f46973ca9f639d532ea4c096d0f4d37da0223fbe94f304fa3c5f53537c30 + languageName: node + linkType: hard + +"@actions/exec@npm:^1.1.1": + version: 1.1.1 + resolution: "@actions/exec@npm:1.1.1" + dependencies: + "@actions/io": "npm:^1.0.1" + checksum: 10c0/4a09f6bdbe50ce68b5cf8a7254d176230d6a74bccf6ecc3857feee209a8c950ba9adec87cc5ecceb04110182d1c17117234e45557d72fde6229b7fd3a395322a + languageName: node + linkType: hard + +"@actions/http-client@npm:^2.0.1": + version: 2.2.3 + resolution: "@actions/http-client@npm:2.2.3" + dependencies: + tunnel: "npm:^0.0.6" + undici: "npm:^5.25.4" + checksum: 10c0/13141b66a42aa4afd8c50f7479e13a5cdb5084ccb3c73ec48894b8029743389a3d2bf8cdc18e23fb70cd33995740526dd308815613907571e897c3aa1e5eada6 + languageName: node + linkType: hard + +"@actions/io@npm:^1.0.1": + version: 1.1.3 + resolution: "@actions/io@npm:1.1.3" + checksum: 10c0/5b8751918e5bf0bebd923ba917fb1c0e294401e7ff0037f32c92a4efa4215550df1f6633c63fd4efb2bdaae8711e69b9e36925857db1f38935ff62a5c92ec29e + languageName: node + linkType: hard + +"@babel/helper-string-parser@npm:^7.27.1": + version: 7.27.1 + resolution: "@babel/helper-string-parser@npm:7.27.1" + checksum: 10c0/8bda3448e07b5583727c103560bcf9c4c24b3c1051a4c516d4050ef69df37bb9a4734a585fe12725b8c2763de0a265aa1e909b485a4e3270b7cfd3e4dbe4b602 + languageName: node + linkType: hard + +"@babel/helper-validator-identifier@npm:^7.27.1": + version: 7.27.1 + resolution: "@babel/helper-validator-identifier@npm:7.27.1" + checksum: 10c0/c558f11c4871d526498e49d07a84752d1800bf72ac0d3dad100309a2eaba24efbf56ea59af5137ff15e3a00280ebe588560534b0e894a4750f8b1411d8f78b84 + languageName: node + linkType: hard + +"@babel/parser@npm:^7.28.4": + 
version: 7.28.4 + resolution: "@babel/parser@npm:7.28.4" + dependencies: + "@babel/types": "npm:^7.28.4" + bin: + parser: ./bin/babel-parser.js + checksum: 10c0/58b239a5b1477ac7ed7e29d86d675cc81075ca055424eba6485872626db2dc556ce63c45043e5a679cd925e999471dba8a3ed4864e7ab1dbf64306ab72c52707 + languageName: node + linkType: hard + +"@babel/types@npm:^7.28.4": + version: 7.28.4 + resolution: "@babel/types@npm:7.28.4" + dependencies: + "@babel/helper-string-parser": "npm:^7.27.1" + "@babel/helper-validator-identifier": "npm:^7.27.1" + checksum: 10c0/ac6f909d6191319e08c80efbfac7bd9a25f80cc83b43cd6d82e7233f7a6b9d6e7b90236f3af7400a3f83b576895bcab9188a22b584eb0f224e80e6d4e95f4517 + languageName: node + linkType: hard + +"@bcoe/v8-coverage@npm:^1.0.1": + version: 1.0.2 + resolution: "@bcoe/v8-coverage@npm:1.0.2" + checksum: 10c0/1eb1dc93cc17fb7abdcef21a6e7b867d6aa99a7ec88ec8207402b23d9083ab22a8011213f04b2cf26d535f1d22dc26139b7929e6c2134c254bd1e14ba5e678c3 + languageName: node + linkType: hard + +"@cspotcode/source-map-support@npm:^0.8.0": + version: 0.8.1 + resolution: "@cspotcode/source-map-support@npm:0.8.1" + dependencies: + "@jridgewell/trace-mapping": "npm:0.3.9" + checksum: 10c0/05c5368c13b662ee4c122c7bfbe5dc0b613416672a829f3e78bc49a357a197e0218d6e74e7c66cfcd04e15a179acab080bd3c69658c9fbefd0e1ccd950a07fc6 + languageName: node + linkType: hard + +"@esbuild/aix-ppc64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/aix-ppc64@npm:0.25.10" + conditions: os=aix & cpu=ppc64 + languageName: node + linkType: hard + +"@esbuild/android-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/android-arm64@npm:0.25.10" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/android-arm@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/android-arm@npm:0.25.10" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@esbuild/android-x64@npm:0.25.10": + version: 0.25.10 + resolution: 
"@esbuild/android-x64@npm:0.25.10" + conditions: os=android & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/darwin-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/darwin-arm64@npm:0.25.10" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/darwin-x64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/darwin-x64@npm:0.25.10" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/freebsd-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/freebsd-arm64@npm:0.25.10" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/freebsd-x64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/freebsd-x64@npm:0.25.10" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/linux-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-arm64@npm:0.25.10" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/linux-arm@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-arm@npm:0.25.10" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@esbuild/linux-ia32@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-ia32@npm:0.25.10" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + +"@esbuild/linux-loong64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-loong64@npm:0.25.10" + conditions: os=linux & cpu=loong64 + languageName: node + linkType: hard + +"@esbuild/linux-mips64el@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-mips64el@npm:0.25.10" + conditions: os=linux & cpu=mips64el + languageName: node + linkType: hard + +"@esbuild/linux-ppc64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-ppc64@npm:0.25.10" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + +"@esbuild/linux-riscv64@npm:0.25.10": + version: 0.25.10 + resolution: 
"@esbuild/linux-riscv64@npm:0.25.10" + conditions: os=linux & cpu=riscv64 + languageName: node + linkType: hard + +"@esbuild/linux-s390x@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-s390x@npm:0.25.10" + conditions: os=linux & cpu=s390x + languageName: node + linkType: hard + +"@esbuild/linux-x64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/linux-x64@npm:0.25.10" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/netbsd-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/netbsd-arm64@npm:0.25.10" + conditions: os=netbsd & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/netbsd-x64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/netbsd-x64@npm:0.25.10" + conditions: os=netbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/openbsd-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/openbsd-arm64@npm:0.25.10" + conditions: os=openbsd & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/openbsd-x64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/openbsd-x64@npm:0.25.10" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/openharmony-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/openharmony-arm64@npm:0.25.10" + conditions: os=openharmony & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/sunos-x64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/sunos-x64@npm:0.25.10" + conditions: os=sunos & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/win32-arm64@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/win32-arm64@npm:0.25.10" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/win32-ia32@npm:0.25.10": + version: 0.25.10 + resolution: "@esbuild/win32-ia32@npm:0.25.10" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@esbuild/win32-x64@npm:0.25.10": + version: 0.25.10 + resolution: 
"@esbuild/win32-x64@npm:0.25.10" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@eslint-community/eslint-utils@npm:^4.7.0, @eslint-community/eslint-utils@npm:^4.8.0": + version: 4.9.0 + resolution: "@eslint-community/eslint-utils@npm:4.9.0" + dependencies: + eslint-visitor-keys: "npm:^3.4.3" + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + checksum: 10c0/8881e22d519326e7dba85ea915ac7a143367c805e6ba1374c987aa2fbdd09195cc51183d2da72c0e2ff388f84363e1b220fd0d19bef10c272c63455162176817 + languageName: node + linkType: hard + +"@eslint-community/regexpp@npm:^4.10.0, @eslint-community/regexpp@npm:^4.12.1": + version: 4.12.1 + resolution: "@eslint-community/regexpp@npm:4.12.1" + checksum: 10c0/a03d98c246bcb9109aec2c08e4d10c8d010256538dcb3f56610191607214523d4fb1b00aa81df830b6dffb74c5fa0be03642513a289c567949d3e550ca11cdf6 + languageName: node + linkType: hard + +"@eslint/config-array@npm:^0.21.0": + version: 0.21.0 + resolution: "@eslint/config-array@npm:0.21.0" + dependencies: + "@eslint/object-schema": "npm:^2.1.6" + debug: "npm:^4.3.1" + minimatch: "npm:^3.1.2" + checksum: 10c0/0ea801139166c4aa56465b309af512ef9b2d3c68f9198751bbc3e21894fe70f25fbf26e1b0e9fffff41857bc21bfddeee58649ae6d79aadcd747db0c5dca771f + languageName: node + linkType: hard + +"@eslint/config-helpers@npm:^0.4.0": + version: 0.4.0 + resolution: "@eslint/config-helpers@npm:0.4.0" + dependencies: + "@eslint/core": "npm:^0.16.0" + checksum: 10c0/4e20c13aaeba1fa024983785df6625b36c8f4415b2433097982e1ccb08db9909e2f7bf60b793538d52ecfd572f2c4eec39a884c13c185cb6be35151f053beed5 + languageName: node + linkType: hard + +"@eslint/core@npm:^0.16.0": + version: 0.16.0 + resolution: "@eslint/core@npm:0.16.0" + dependencies: + "@types/json-schema": "npm:^7.0.15" + checksum: 10c0/f27496a244ccfdca3e0fbc3331f9da3f603bdf1aa431af0045a3205826789a54493bc619ad6311a9090eaf7bc25798ff4e265dea1eccd2df9ce3b454f7e7da27 + languageName: node + linkType: hard + +"@eslint/eslintrc@npm:^3.3.1": + 
version: 3.3.1 + resolution: "@eslint/eslintrc@npm:3.3.1" + dependencies: + ajv: "npm:^6.12.4" + debug: "npm:^4.3.2" + espree: "npm:^10.0.1" + globals: "npm:^14.0.0" + ignore: "npm:^5.2.0" + import-fresh: "npm:^3.2.1" + js-yaml: "npm:^4.1.0" + minimatch: "npm:^3.1.2" + strip-json-comments: "npm:^3.1.1" + checksum: 10c0/b0e63f3bc5cce4555f791a4e487bf999173fcf27c65e1ab6e7d63634d8a43b33c3693e79f192cbff486d7df1be8ebb2bd2edc6e70ddd486cbfa84a359a3e3b41 + languageName: node + linkType: hard + +"@eslint/js@npm:9.37.0, @eslint/js@npm:^9.37.0": + version: 9.37.0 + resolution: "@eslint/js@npm:9.37.0" + checksum: 10c0/84f98a6213522fc76ea104bd910f606136200bd918544e056a7a22442d3f9d5c3c5cd7f4cdf2499d49b1fa140155b87d597a1f16d01644920f05c228e9ca0378 + languageName: node + linkType: hard + +"@eslint/object-schema@npm:^2.1.6": + version: 2.1.6 + resolution: "@eslint/object-schema@npm:2.1.6" + checksum: 10c0/b8cdb7edea5bc5f6a96173f8d768d3554a628327af536da2fc6967a93b040f2557114d98dbcdbf389d5a7b290985ad6a9ce5babc547f36fc1fde42e674d11a56 + languageName: node + linkType: hard + +"@eslint/plugin-kit@npm:^0.4.0": + version: 0.4.0 + resolution: "@eslint/plugin-kit@npm:0.4.0" + dependencies: + "@eslint/core": "npm:^0.16.0" + levn: "npm:^0.4.1" + checksum: 10c0/125614e902bb34c041da859794c47ac2ec4a814f5d9e7c4d37fcd34b38d8ee5cf1f97020d38d168885d9bf4046a9a7decb86b4cee8dac9eedcc6ad08ebafe204 + languageName: node + linkType: hard + +"@fastify/busboy@npm:^2.0.0": + version: 2.1.1 + resolution: "@fastify/busboy@npm:2.1.1" + checksum: 10c0/6f8027a8cba7f8f7b736718b013f5a38c0476eea67034c94a0d3c375e2b114366ad4419e6a6fa7ffc2ef9c6d3e0435d76dd584a7a1cbac23962fda7650b579e3 + languageName: node + linkType: hard + +"@gerrit0/mini-shiki@npm:^3.12.0": + version: 3.13.1 + resolution: "@gerrit0/mini-shiki@npm:3.13.1" + dependencies: + "@shikijs/engine-oniguruma": "npm:^3.13.0" + "@shikijs/langs": "npm:^3.13.0" + "@shikijs/themes": "npm:^3.13.0" + "@shikijs/types": "npm:^3.13.0" + "@shikijs/vscode-textmate": 
"npm:^10.0.2" + checksum: 10c0/926babea969fb6788eb8c36fb69ff4c45273bbebde318297a89a45807aba42d266cab5654f13b4126b0bb9c4d074dbf2505d874d917daa9381d5281a5f66953c + languageName: node + linkType: hard + +"@humanfs/core@npm:^0.19.1": + version: 0.19.1 + resolution: "@humanfs/core@npm:0.19.1" + checksum: 10c0/aa4e0152171c07879b458d0e8a704b8c3a89a8c0541726c6b65b81e84fd8b7564b5d6c633feadc6598307d34564bd53294b533491424e8e313d7ab6c7bc5dc67 + languageName: node + linkType: hard + +"@humanfs/node@npm:^0.16.6": + version: 0.16.7 + resolution: "@humanfs/node@npm:0.16.7" + dependencies: + "@humanfs/core": "npm:^0.19.1" + "@humanwhocodes/retry": "npm:^0.4.0" + checksum: 10c0/9f83d3cf2cfa37383e01e3cdaead11cd426208e04c44adcdd291aa983aaf72d7d3598844d2fe9ce54896bb1bf8bd4b56883376611c8905a19c44684642823f30 + languageName: node + linkType: hard + +"@humanwhocodes/module-importer@npm:^1.0.1": + version: 1.0.1 + resolution: "@humanwhocodes/module-importer@npm:1.0.1" + checksum: 10c0/909b69c3b86d482c26b3359db16e46a32e0fb30bd306a3c176b8313b9e7313dba0f37f519de6aa8b0a1921349e505f259d19475e123182416a506d7f87e7f529 + languageName: node + linkType: hard + +"@humanwhocodes/retry@npm:^0.4.0, @humanwhocodes/retry@npm:^0.4.2": + version: 0.4.3 + resolution: "@humanwhocodes/retry@npm:0.4.3" + checksum: 10c0/3775bb30087d4440b3f7406d5a057777d90e4b9f435af488a4923ef249e93615fb78565a85f173a186a076c7706a81d0d57d563a2624e4de2c5c9c66c486ce42 + languageName: node + linkType: hard + +"@isaacs/balanced-match@npm:^4.0.1": + version: 4.0.1 + resolution: "@isaacs/balanced-match@npm:4.0.1" + checksum: 10c0/7da011805b259ec5c955f01cee903da72ad97c5e6f01ca96197267d3f33103d5b2f8a1af192140f3aa64526c593c8d098ae366c2b11f7f17645d12387c2fd420 + languageName: node + linkType: hard + +"@isaacs/brace-expansion@npm:^5.0.0": + version: 5.0.0 + resolution: "@isaacs/brace-expansion@npm:5.0.0" + dependencies: + "@isaacs/balanced-match": "npm:^4.0.1" + checksum: 
10c0/b4d4812f4be53afc2c5b6c545001ff7a4659af68d4484804e9d514e183d20269bb81def8682c01a22b17c4d6aed14292c8494f7d2ac664e547101c1a905aa977 + languageName: node + linkType: hard + +"@isaacs/cliui@npm:^8.0.2": + version: 8.0.2 + resolution: "@isaacs/cliui@npm:8.0.2" + dependencies: + string-width: "npm:^5.1.2" + string-width-cjs: "npm:string-width@^4.2.0" + strip-ansi: "npm:^7.0.1" + strip-ansi-cjs: "npm:strip-ansi@^6.0.1" + wrap-ansi: "npm:^8.1.0" + wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" + checksum: 10c0/b1bf42535d49f11dc137f18d5e4e63a28c5569de438a221c369483731e9dac9fb797af554e8bf02b6192d1e5eba6e6402cf93900c3d0ac86391d00d04876789e + languageName: node + linkType: hard + +"@isaacs/fs-minipass@npm:^4.0.0": + version: 4.0.1 + resolution: "@isaacs/fs-minipass@npm:4.0.1" + dependencies: + minipass: "npm:^7.0.4" + checksum: 10c0/c25b6dc1598790d5b55c0947a9b7d111cfa92594db5296c3b907e2f533c033666f692a3939eadac17b1c7c40d362d0b0635dc874cbfe3e70db7c2b07cc97a5d2 + languageName: node + linkType: hard + +"@istanbuljs/schema@npm:^0.1.2, @istanbuljs/schema@npm:^0.1.3": + version: 0.1.3 + resolution: "@istanbuljs/schema@npm:0.1.3" + checksum: 10c0/61c5286771676c9ca3eb2bd8a7310a9c063fb6e0e9712225c8471c582d157392c88f5353581c8c9adbe0dff98892317d2fdfc56c3499aa42e0194405206a963a + languageName: node + linkType: hard + +"@jridgewell/resolve-uri@npm:^3.0.3, @jridgewell/resolve-uri@npm:^3.1.0": + version: 3.1.2 + resolution: "@jridgewell/resolve-uri@npm:3.1.2" + checksum: 10c0/d502e6fb516b35032331406d4e962c21fe77cdf1cbdb49c6142bcbd9e30507094b18972778a6e27cbad756209cfe34b1a27729e6fa08a2eb92b33943f680cf1e + languageName: node + linkType: hard + +"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14, @jridgewell/sourcemap-codec@npm:^1.5.5": + version: 1.5.5 + resolution: "@jridgewell/sourcemap-codec@npm:1.5.5" + checksum: 10c0/f9e538f302b63c0ebc06eecb1dd9918dd4289ed36147a0ddce35d6ea4d7ebbda243cda7b2213b6a5e1d8087a298d5cf630fb2bd39329cdecb82017023f6081a0 + languageName: 
node + linkType: hard + +"@jridgewell/trace-mapping@npm:0.3.9": + version: 0.3.9 + resolution: "@jridgewell/trace-mapping@npm:0.3.9" + dependencies: + "@jridgewell/resolve-uri": "npm:^3.0.3" + "@jridgewell/sourcemap-codec": "npm:^1.4.10" + checksum: 10c0/fa425b606d7c7ee5bfa6a31a7b050dd5814b4082f318e0e4190f991902181b4330f43f4805db1dd4f2433fd0ed9cc7a7b9c2683f1deeab1df1b0a98b1e24055b + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:^0.3.12": + version: 0.3.31 + resolution: "@jridgewell/trace-mapping@npm:0.3.31" + dependencies: + "@jridgewell/resolve-uri": "npm:^3.1.0" + "@jridgewell/sourcemap-codec": "npm:^1.4.14" + checksum: 10c0/4b30ec8cd56c5fd9a661f088230af01e0c1a3888d11ffb6b47639700f71225be21d1f7e168048d6d4f9449207b978a235c07c8f15c07705685d16dc06280e9d9 + languageName: node + linkType: hard + +"@jsdevtools/ez-spawn@npm:^3.0.4": + version: 3.0.4 + resolution: "@jsdevtools/ez-spawn@npm:3.0.4" + dependencies: + call-me-maybe: "npm:^1.0.1" + cross-spawn: "npm:^7.0.3" + string-argv: "npm:^0.3.1" + type-detect: "npm:^4.0.8" + checksum: 10c0/fb56f99c4d09ccaeace4ebaecfa31ab1ff90517d5ca51dbd624f4b9ab489eee469504ab33a12a2bc8b5f0a35ee61e139cc087b242ad7947c61674454cb41c75e + languageName: node + linkType: hard + +"@microsoft/api-extractor-model@npm:7.31.0": + version: 7.31.0 + resolution: "@microsoft/api-extractor-model@npm:7.31.0" + dependencies: + "@microsoft/tsdoc": "npm:~0.15.1" + "@microsoft/tsdoc-config": "npm:~0.17.1" + "@rushstack/node-core-library": "npm:5.16.0" + checksum: 10c0/b8fc06a8aea98fc6f9357e8ea59177fc3432e0c309a1604d80d28ed6723bd9c5292c02723d7ad1a5097aef54d3daaf65c529bf9c3717edff2f02994f92dc8fd1 + languageName: node + linkType: hard + +"@microsoft/api-extractor@npm:^7.50.1": + version: 7.53.0 + resolution: "@microsoft/api-extractor@npm:7.53.0" + dependencies: + "@microsoft/api-extractor-model": "npm:7.31.0" + "@microsoft/tsdoc": "npm:~0.15.1" + "@microsoft/tsdoc-config": "npm:~0.17.1" + "@rushstack/node-core-library": "npm:5.16.0" + 
"@rushstack/rig-package": "npm:0.6.0" + "@rushstack/terminal": "npm:0.19.0" + "@rushstack/ts-command-line": "npm:5.1.0" + lodash: "npm:~4.17.15" + minimatch: "npm:10.0.3" + resolve: "npm:~1.22.1" + semver: "npm:~7.5.4" + source-map: "npm:~0.6.1" + typescript: "npm:5.8.2" + bin: + api-extractor: bin/api-extractor + checksum: 10c0/bc092cadd8f5dec025b9b379ed6e7be6ff1ce694be0e3f53d04829c50897f085ee5dd25793f2727fe2bd862175a2b69d022009212bc0ec36eae225e6bb810e36 + languageName: node + linkType: hard + +"@microsoft/tsdoc-config@npm:~0.17.1": + version: 0.17.1 + resolution: "@microsoft/tsdoc-config@npm:0.17.1" + dependencies: + "@microsoft/tsdoc": "npm:0.15.1" + ajv: "npm:~8.12.0" + jju: "npm:~1.4.0" + resolve: "npm:~1.22.2" + checksum: 10c0/a686355796f492f27af17e2a17d615221309caf4d9f9047a5a8f17f8625c467c4c81e2a7923ddafd71b892631d5e5013c4b8cc49c5867d3cc1d260fd90c1413d + languageName: node + linkType: hard + +"@microsoft/tsdoc@npm:0.15.1, @microsoft/tsdoc@npm:~0.15.1": + version: 0.15.1 + resolution: "@microsoft/tsdoc@npm:0.15.1" + checksum: 10c0/09948691fac56c45a0d1920de478d66a30371a325bd81addc92eea5654d95106ce173c440fea1a1bd5bb95b3a544b6d4def7bb0b5a846c05d043575d8369a20c + languageName: node + linkType: hard + +"@nodelib/fs.scandir@npm:2.1.5": + version: 2.1.5 + resolution: "@nodelib/fs.scandir@npm:2.1.5" + dependencies: + "@nodelib/fs.stat": "npm:2.0.5" + run-parallel: "npm:^1.1.9" + checksum: 10c0/732c3b6d1b1e967440e65f284bd06e5821fedf10a1bea9ed2bb75956ea1f30e08c44d3def9d6a230666574edbaf136f8cfd319c14fd1f87c66e6a44449afb2eb + languageName: node + linkType: hard + +"@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": + version: 2.0.5 + resolution: "@nodelib/fs.stat@npm:2.0.5" + checksum: 10c0/88dafe5e3e29a388b07264680dc996c17f4bda48d163a9d4f5c1112979f0ce8ec72aa7116122c350b4e7976bc5566dc3ddb579be1ceaacc727872eb4ed93926d + languageName: node + linkType: hard + +"@nodelib/fs.walk@npm:^1.2.3": + version: 1.2.8 + resolution: "@nodelib/fs.walk@npm:1.2.8" + dependencies: + 
"@nodelib/fs.scandir": "npm:2.1.5" + fastq: "npm:^1.6.0" + checksum: 10c0/db9de047c3bb9b51f9335a7bb46f4fcfb6829fb628318c12115fbaf7d369bfce71c15b103d1fc3b464812d936220ee9bc1c8f762d032c9f6be9acc99249095b1 + languageName: node + linkType: hard + +"@npmcli/agent@npm:^3.0.0": + version: 3.0.0 + resolution: "@npmcli/agent@npm:3.0.0" + dependencies: + agent-base: "npm:^7.1.0" + http-proxy-agent: "npm:^7.0.0" + https-proxy-agent: "npm:^7.0.1" + lru-cache: "npm:^10.0.1" + socks-proxy-agent: "npm:^8.0.3" + checksum: 10c0/efe37b982f30740ee77696a80c196912c274ecd2cb243bc6ae7053a50c733ce0f6c09fda085145f33ecf453be19654acca74b69e81eaad4c90f00ccffe2f9271 + languageName: node + linkType: hard + +"@npmcli/fs@npm:^4.0.0": + version: 4.0.0 + resolution: "@npmcli/fs@npm:4.0.0" + dependencies: + semver: "npm:^7.3.5" + checksum: 10c0/c90935d5ce670c87b6b14fab04a965a3b8137e585f8b2a6257263bd7f97756dd736cb165bb470e5156a9e718ecd99413dccc54b1138c1a46d6ec7cf325982fe5 + languageName: node + linkType: hard + +"@octokit/action@npm:^6.1.0": + version: 6.1.0 + resolution: "@octokit/action@npm:6.1.0" + dependencies: + "@octokit/auth-action": "npm:^4.0.0" + "@octokit/core": "npm:^5.0.0" + "@octokit/plugin-paginate-rest": "npm:^9.0.0" + "@octokit/plugin-rest-endpoint-methods": "npm:^10.0.0" + "@octokit/types": "npm:^12.0.0" + undici: "npm:^6.0.0" + checksum: 10c0/c5d692733020a9cd5c85eabe587c7a609a19dc00b4b49294be838cb4f4484b60d69a30c00df00f8fc62d11e9481361eacd4cc829afce58257684bc50520aed08 + languageName: node + linkType: hard + +"@octokit/auth-action@npm:^4.0.0": + version: 4.1.0 + resolution: "@octokit/auth-action@npm:4.1.0" + dependencies: + "@octokit/auth-token": "npm:^4.0.0" + "@octokit/types": "npm:^13.0.0" + checksum: 10c0/f7f0a222786f348dd0ee6ba72e6c54f1ce36b15c8bb0ed998e8966cea23ee912abeb89b8613c8b354fa6bd1ee7199f74deb609607762d3411c0c8990b2c53fb4 + languageName: node + linkType: hard + +"@octokit/auth-token@npm:^4.0.0": + version: 4.0.0 + resolution: "@octokit/auth-token@npm:4.0.0" + checksum: 
10c0/57acaa6c394c5abab2f74e8e1dcf4e7a16b236f713c77a54b8f08e2d14114de94b37946259e33ec2aab0566b26f724c2b71d2602352b59e541a9854897618f3c + languageName: node + linkType: hard + +"@octokit/core@npm:^5.0.0": + version: 5.2.2 + resolution: "@octokit/core@npm:5.2.2" + dependencies: + "@octokit/auth-token": "npm:^4.0.0" + "@octokit/graphql": "npm:^7.1.0" + "@octokit/request": "npm:^8.4.1" + "@octokit/request-error": "npm:^5.1.1" + "@octokit/types": "npm:^13.0.0" + before-after-hook: "npm:^2.2.0" + universal-user-agent: "npm:^6.0.0" + checksum: 10c0/b4484d85552303b839613e2133dcd064fa06a7c10fe0ebd11ba8f67cb8e3384e48983c589f4d1dc0fa3754857784e3d90ff4eab9782e118baf13ddd1b834957c + languageName: node + linkType: hard + +"@octokit/endpoint@npm:^9.0.6": + version: 9.0.6 + resolution: "@octokit/endpoint@npm:9.0.6" + dependencies: + "@octokit/types": "npm:^13.1.0" + universal-user-agent: "npm:^6.0.0" + checksum: 10c0/8e06197b21869aeb498e0315093ca6fbee12bd1bdcfc1667fcd7d79d827d84f2c5a30702ffd28bba7879780e367d14c30df5b20d47fcaed5de5fdc05f5d4e013 + languageName: node + linkType: hard + +"@octokit/graphql@npm:^7.1.0": + version: 7.1.1 + resolution: "@octokit/graphql@npm:7.1.1" + dependencies: + "@octokit/request": "npm:^8.4.1" + "@octokit/types": "npm:^13.0.0" + universal-user-agent: "npm:^6.0.0" + checksum: 10c0/c27216200f3f4ce7ce2a694fb7ea43f8ea4a807fbee3a423c41ed137dd7948dfc0bbf6ea1656f029a7625c84b583acdef740a7032266d0eff55305c91c3a1ed6 + languageName: node + linkType: hard + +"@octokit/openapi-types@npm:^20.0.0": + version: 20.0.0 + resolution: "@octokit/openapi-types@npm:20.0.0" + checksum: 10c0/5176dcc3b9d182ede3d446750cfa5cf31139624785a73fcf3511e3102a802b4d7cc45e999c27ed91d73fe8b7d718c8c406facb48688926921a71fe603b7db95d + languageName: node + linkType: hard + +"@octokit/openapi-types@npm:^24.2.0": + version: 24.2.0 + resolution: "@octokit/openapi-types@npm:24.2.0" + checksum: 
10c0/8f47918b35e9b7f6109be6f7c8fc3a64ad13a48233112b29e92559e64a564b810eb3ebf81b4cd0af1bb2989d27b9b95cca96e841ec4e23a3f68703cefe62fd9e + languageName: node + linkType: hard + +"@octokit/plugin-paginate-rest@npm:^9.0.0": + version: 9.2.2 + resolution: "@octokit/plugin-paginate-rest@npm:9.2.2" + dependencies: + "@octokit/types": "npm:^12.6.0" + peerDependencies: + "@octokit/core": 5 + checksum: 10c0/e9c85b17064fe6b62f9af88dba008f3daef456b1195340ea0831990e9c4dbabe89be950b6e5dc924ebcca18ad1aaa0cf6df7d824dc8be26ce9a55f20336ff815 + languageName: node + linkType: hard + +"@octokit/plugin-rest-endpoint-methods@npm:^10.0.0": + version: 10.4.1 + resolution: "@octokit/plugin-rest-endpoint-methods@npm:10.4.1" + dependencies: + "@octokit/types": "npm:^12.6.0" + peerDependencies: + "@octokit/core": 5 + checksum: 10c0/4b8f64c0f7fa12464546ad312a5289c2a799967e01e90e2c4923ec6e9604cf212dcb50d9795c9a688867f973c9c529c5950368564c560406c652bcd298f090af + languageName: node + linkType: hard + +"@octokit/request-error@npm:^5.1.1": + version: 5.1.1 + resolution: "@octokit/request-error@npm:5.1.1" + dependencies: + "@octokit/types": "npm:^13.1.0" + deprecation: "npm:^2.0.0" + once: "npm:^1.4.0" + checksum: 10c0/dc9fc76ea5e4199273e4665ce9ddf345fe8f25578d9999c9a16f276298e61ee6fe0e6f5a6147b91ba3b34fdf5b9e6b7af6ae13d6333175e95b30c574088f7a2d + languageName: node + linkType: hard + +"@octokit/request@npm:^8.4.1": + version: 8.4.1 + resolution: "@octokit/request@npm:8.4.1" + dependencies: + "@octokit/endpoint": "npm:^9.0.6" + "@octokit/request-error": "npm:^5.1.1" + "@octokit/types": "npm:^13.1.0" + universal-user-agent: "npm:^6.0.0" + checksum: 10c0/1a69dcb7336de708a296db9e9a58040e5b284a87495a63112f80eb0007da3fc96a9fadecb9e875fc63cf179c23a0f81031fbef2a6f610a219e45805ead03fcf3 + languageName: node + linkType: hard + +"@octokit/types@npm:^12.0.0, @octokit/types@npm:^12.6.0": + version: 12.6.0 + resolution: "@octokit/types@npm:12.6.0" + dependencies: + "@octokit/openapi-types": "npm:^20.0.0" + 
checksum: 10c0/0bea58bda46c93287f5a80a0e52bc60e7dc7136b8a38c3569d63d073fb9df4a56acdb9d9bdba9978f37c374a4a6e3e52886ef5b08cace048adb0012cacef942c + languageName: node + linkType: hard + +"@octokit/types@npm:^13.0.0, @octokit/types@npm:^13.1.0": + version: 13.10.0 + resolution: "@octokit/types@npm:13.10.0" + dependencies: + "@octokit/openapi-types": "npm:^24.2.0" + checksum: 10c0/f66a401b89d653ec28e5c1529abdb7965752db4d9d40fa54c80e900af4c6bf944af6bd0a83f5b4f1eecb72e3d646899dfb27ffcf272ac243552de7e3b97a038d + languageName: node + linkType: hard + +"@pkgjs/parseargs@npm:^0.11.0": + version: 0.11.0 + resolution: "@pkgjs/parseargs@npm:0.11.0" + checksum: 10c0/5bd7576bb1b38a47a7fc7b51ac9f38748e772beebc56200450c4a817d712232b8f1d3ef70532c80840243c657d491cf6a6be1e3a214cff907645819fdc34aadd + languageName: node + linkType: hard + +"@rollup/pluginutils@npm:^5.1.4": + version: 5.3.0 + resolution: "@rollup/pluginutils@npm:5.3.0" + dependencies: + "@types/estree": "npm:^1.0.0" + estree-walker: "npm:^2.0.2" + picomatch: "npm:^4.0.2" + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 10c0/001834bf62d7cf5bac424d2617c113f7f7d3b2bf3c1778cbcccb72cdc957b68989f8e7747c782c2b911f1dde8257f56f8ac1e779e29e74e638e3f1e2cac2bcd0 + languageName: node + linkType: hard + +"@rollup/rollup-android-arm-eabi@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.52.4" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-android-arm64@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-android-arm64@npm:4.52.4" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-arm64@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-darwin-arm64@npm:4.52.4" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-x64@npm:4.52.4": + version: 4.52.4 + resolution: 
"@rollup/rollup-darwin-x64@npm:4.52.4" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-arm64@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.52.4" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-x64@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-freebsd-x64@npm:4.52.4" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-gnueabihf@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.52.4" + conditions: os=linux & cpu=arm & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-musleabihf@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.52.4" + conditions: os=linux & cpu=arm & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.52.4" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-musl@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.52.4" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-loong64-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.52.4" + conditions: os=linux & cpu=loong64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-ppc64-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.52.4" + conditions: os=linux & cpu=ppc64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.52.4" + conditions: os=linux & cpu=riscv64 & libc=glibc + languageName: node + linkType: hard + 
+"@rollup/rollup-linux-riscv64-musl@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.52.4" + conditions: os=linux & cpu=riscv64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-s390x-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.52.4" + conditions: os=linux & cpu=s390x & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.52.4" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-musl@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.52.4" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-openharmony-arm64@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-openharmony-arm64@npm:4.52.4" + conditions: os=openharmony & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.52.4" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-ia32-msvc@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.52.4" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-gnu@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-win32-x64-gnu@npm:4.52.4" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.52.4": + version: 4.52.4 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.52.4" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@rtsao/scc@npm:^1.1.0": + version: 1.1.0 + resolution: "@rtsao/scc@npm:1.1.0" + checksum: 
10c0/b5bcfb0d87f7d1c1c7c0f7693f53b07866ed9fec4c34a97a8c948fb9a7c0082e416ce4d3b60beb4f5e167cbe04cdeefbf6771320f3ede059b9ce91188c409a5b + languageName: node + linkType: hard + +"@rushstack/node-core-library@npm:5.16.0": + version: 5.16.0 + resolution: "@rushstack/node-core-library@npm:5.16.0" + dependencies: + ajv: "npm:~8.13.0" + ajv-draft-04: "npm:~1.0.0" + ajv-formats: "npm:~3.0.1" + fs-extra: "npm:~11.3.0" + import-lazy: "npm:~4.0.0" + jju: "npm:~1.4.0" + resolve: "npm:~1.22.1" + semver: "npm:~7.5.4" + peerDependencies: + "@types/node": "*" + peerDependenciesMeta: + "@types/node": + optional: true + checksum: 10c0/b89ede99eb050f8430c742d8cb57446bba9cb0aaa0948df75cc0b93abfdb1cc83fc6f8d1d984a6011c4e92a32229de3c43273e9d49c7d5b84693b5cf159f6907 + languageName: node + linkType: hard + +"@rushstack/problem-matcher@npm:0.1.1": + version: 0.1.1 + resolution: "@rushstack/problem-matcher@npm:0.1.1" + peerDependencies: + "@types/node": "*" + peerDependenciesMeta: + "@types/node": + optional: true + checksum: 10c0/c847e721d3536ebb316fdd90b3e4033a7d24ff8c70e38e3eaeaadf167c4d14a7f16377ae4af8097532386bcfa81c15cfec7d2da517542c07882d273d56861d78 + languageName: node + linkType: hard + +"@rushstack/rig-package@npm:0.6.0": + version: 0.6.0 + resolution: "@rushstack/rig-package@npm:0.6.0" + dependencies: + resolve: "npm:~1.22.1" + strip-json-comments: "npm:~3.1.1" + checksum: 10c0/303c5c010a698343124036414dbeed44b24e67585307ffa6effd052624b0384cc08a12aeb153e8466b7abd6f516900ecf8629600230f0f2c33cd5c0c3dace65e + languageName: node + linkType: hard + +"@rushstack/terminal@npm:0.19.0": + version: 0.19.0 + resolution: "@rushstack/terminal@npm:0.19.0" + dependencies: + "@rushstack/node-core-library": "npm:5.16.0" + "@rushstack/problem-matcher": "npm:0.1.1" + supports-color: "npm:~8.1.1" + peerDependencies: + "@types/node": "*" + peerDependenciesMeta: + "@types/node": + optional: true + checksum: 
10c0/c7e9b14c9ad1a174861ec15d19f6725db3c74771c87b16089a2010d8721e7136f72dcf432832ad8a348d45805360983206f10095e9d4ac1eb3861ff243062b30 + languageName: node + linkType: hard + +"@rushstack/ts-command-line@npm:5.1.0": + version: 5.1.0 + resolution: "@rushstack/ts-command-line@npm:5.1.0" + dependencies: + "@rushstack/terminal": "npm:0.19.0" + "@types/argparse": "npm:1.0.38" + argparse: "npm:~1.0.9" + string-argv: "npm:~0.3.1" + checksum: 10c0/5b32e2f3bc11c32bed6f46510bccb53d691d41769767de823cb945baa0a999918a69b4f69e35db7d7df1ded67fa547085ff46856cea383475f4d2254f6400576 + languageName: node + linkType: hard + +"@shikijs/engine-oniguruma@npm:^3.13.0": + version: 3.13.0 + resolution: "@shikijs/engine-oniguruma@npm:3.13.0" + dependencies: + "@shikijs/types": "npm:3.13.0" + "@shikijs/vscode-textmate": "npm:^10.0.2" + checksum: 10c0/0cd0307028acf0a30fff7de642b84d4600aa33086f88952f1313f9ef56b604e067ebeb2e64f4e9025c06c68dfd6434c2c5da83d385af4792b622e6ad07f7613f + languageName: node + linkType: hard + +"@shikijs/langs@npm:^3.13.0": + version: 3.13.0 + resolution: "@shikijs/langs@npm:3.13.0" + dependencies: + "@shikijs/types": "npm:3.13.0" + checksum: 10c0/3fe59b55b5d1da9784cd93dc2eaae19249c5d218b39ce52c0c802b38894cdedcc55ccf813486a9362be0c97bbc0568a4f7bb2a62bf2ee0edbb2d52852878c8ed + languageName: node + linkType: hard + +"@shikijs/themes@npm:^3.13.0": + version: 3.13.0 + resolution: "@shikijs/themes@npm:3.13.0" + dependencies: + "@shikijs/types": "npm:3.13.0" + checksum: 10c0/b00052267de6f8acf09d01994823234ef4f75285d4c6587f039f5081490462a50ef73defb916add45fec1f469cf0c15ed53e5ada8ca9a48ebc7a243e4a76bbc6 + languageName: node + linkType: hard + +"@shikijs/types@npm:3.13.0, @shikijs/types@npm:^3.13.0": + version: 3.13.0 + resolution: "@shikijs/types@npm:3.13.0" + dependencies: + "@shikijs/vscode-textmate": "npm:^10.0.2" + "@types/hast": "npm:^3.0.4" + checksum: 
10c0/5f0ceca1dad4f4dfb8c424f1aa78953ace7eb2215d82b863500f1ea023faf55acaa54373f3b59a8ada85f15c304cf658b95eae128c43505855d13607d979a726 + languageName: node + linkType: hard + +"@shikijs/vscode-textmate@npm:^10.0.2": + version: 10.0.2 + resolution: "@shikijs/vscode-textmate@npm:10.0.2" + checksum: 10c0/36b682d691088ec244de292dc8f91b808f95c89466af421cf84cbab92230f03c8348649c14b3251991b10ce632b0c715e416e992dd5f28ff3221dc2693fd9462 + languageName: node + linkType: hard + +"@tsconfig/node10@npm:^1.0.7": + version: 1.0.11 + resolution: "@tsconfig/node10@npm:1.0.11" + checksum: 10c0/28a0710e5d039e0de484bdf85fee883bfd3f6a8980601f4d44066b0a6bcd821d31c4e231d1117731c4e24268bd4cf2a788a6787c12fc7f8d11014c07d582783c + languageName: node + linkType: hard + +"@tsconfig/node12@npm:^1.0.7": + version: 1.0.11 + resolution: "@tsconfig/node12@npm:1.0.11" + checksum: 10c0/dddca2b553e2bee1308a056705103fc8304e42bb2d2cbd797b84403a223b25c78f2c683ec3e24a095e82cd435387c877239bffcb15a590ba817cd3f6b9a99fd9 + languageName: node + linkType: hard + +"@tsconfig/node14@npm:^1.0.0": + version: 1.0.3 + resolution: "@tsconfig/node14@npm:1.0.3" + checksum: 10c0/67c1316d065fdaa32525bc9449ff82c197c4c19092b9663b23213c8cbbf8d88b6ed6a17898e0cbc2711950fbfaf40388938c1c748a2ee89f7234fc9e7fe2bf44 + languageName: node + linkType: hard + +"@tsconfig/node16@npm:^1.0.2": + version: 1.0.4 + resolution: "@tsconfig/node16@npm:1.0.4" + checksum: 10c0/05f8f2734e266fb1839eb1d57290df1664fe2aa3b0fdd685a9035806daa635f7519bf6d5d9b33f6e69dd545b8c46bd6e2b5c79acb2b1f146e885f7f11a42a5bb + languageName: node + linkType: hard + +"@types/argparse@npm:1.0.38": + version: 1.0.38 + resolution: "@types/argparse@npm:1.0.38" + checksum: 10c0/4fc892da5df16923f48180da2d1f4562fa8b0507cf636b24780444fa0a1d7321d4dc0c0ecbee6152968823f5a2ae0d321b4f8c705a489bf1ae1245bdeb0868fd + languageName: node + linkType: hard + +"@types/estree@npm:1.0.8, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.6": + version: 1.0.8 + resolution: 
"@types/estree@npm:1.0.8" + checksum: 10c0/39d34d1afaa338ab9763f37ad6066e3f349444f9052b9676a7cc0252ef9485a41c6d81c9c4e0d26e9077993354edf25efc853f3224dd4b447175ef62bdcc86a5 + languageName: node + linkType: hard + +"@types/hast@npm:^3.0.4": + version: 3.0.4 + resolution: "@types/hast@npm:3.0.4" + dependencies: + "@types/unist": "npm:*" + checksum: 10c0/3249781a511b38f1d330fd1e3344eed3c4e7ea8eff82e835d35da78e637480d36fad37a78be5a7aed8465d237ad0446abc1150859d0fde395354ea634decf9f7 + languageName: node + linkType: hard + +"@types/istanbul-lib-coverage@npm:^2.0.1": + version: 2.0.6 + resolution: "@types/istanbul-lib-coverage@npm:2.0.6" + checksum: 10c0/3948088654f3eeb45363f1db158354fb013b362dba2a5c2c18c559484d5eb9f6fd85b23d66c0a7c2fcfab7308d0a585b14dadaca6cc8bf89ebfdc7f8f5102fb7 + languageName: node + linkType: hard + +"@types/json-schema@npm:^7.0.15": + version: 7.0.15 + resolution: "@types/json-schema@npm:7.0.15" + checksum: 10c0/a996a745e6c5d60292f36731dd41341339d4eeed8180bb09226e5c8d23759067692b1d88e5d91d72ee83dfc00d3aca8e7bd43ea120516c17922cbcb7c3e252db + languageName: node + linkType: hard + +"@types/json5@npm:^0.0.29": + version: 0.0.29 + resolution: "@types/json5@npm:0.0.29" + checksum: 10c0/6bf5337bc447b706bb5b4431d37686aa2ea6d07cfd6f79cc31de80170d6ff9b1c7384a9c0ccbc45b3f512bae9e9f75c2e12109806a15331dc94e8a8db6dbb4ac + languageName: node + linkType: hard + +"@types/mocha@npm:^10.0.10": + version: 10.0.10 + resolution: "@types/mocha@npm:10.0.10" + checksum: 10c0/d2b8c48138cde6923493e42b38e839695eb42edd04629abe480a8f34c0e3f50dd82a55832c2e8d2b6e6f9e4deb492d7d733e600fbbdd5a0ceccbcfc6844ff9d5 + languageName: node + linkType: hard + +"@types/node@npm:^24.6.2": + version: 24.6.2 + resolution: "@types/node@npm:24.6.2" + dependencies: + undici-types: "npm:~7.13.0" + checksum: 10c0/d029757711be85ec468686f66cd8eca78f5996d7e2b1a5b818436e0299b19925b0fb4f7509a6b62750abdc72d66f5750ce22fb8b55559baca86df89a9c44722e + languageName: node + linkType: hard + +"@types/unist@npm:*": + 
version: 3.0.3 + resolution: "@types/unist@npm:3.0.3" + checksum: 10c0/2b1e4adcab78388e088fcc3c0ae8700f76619dbcb4741d7d201f87e2cb346bfc29a89003cfea2d76c996e1061452e14fcd737e8b25aacf949c1f2d6b2bc3dd60 + languageName: node + linkType: hard + +"@typescript-eslint/eslint-plugin@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/eslint-plugin@npm:8.46.0" + dependencies: + "@eslint-community/regexpp": "npm:^4.10.0" + "@typescript-eslint/scope-manager": "npm:8.46.0" + "@typescript-eslint/type-utils": "npm:8.46.0" + "@typescript-eslint/utils": "npm:8.46.0" + "@typescript-eslint/visitor-keys": "npm:8.46.0" + graphemer: "npm:^1.4.0" + ignore: "npm:^7.0.0" + natural-compare: "npm:^1.4.0" + ts-api-utils: "npm:^2.1.0" + peerDependencies: + "@typescript-eslint/parser": ^8.46.0 + eslint: ^8.57.0 || ^9.0.0 + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/9de2b2127b977b0d73733042602a744e5b69bfe906c6dac424123ff9726816dcc4bb3d4ba470bc1fc5c741421f53274a3a896c09fbb50e298352d4a72011b2c2 + languageName: node + linkType: hard + +"@typescript-eslint/parser@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/parser@npm:8.46.0" + dependencies: + "@typescript-eslint/scope-manager": "npm:8.46.0" + "@typescript-eslint/types": "npm:8.46.0" + "@typescript-eslint/typescript-estree": "npm:8.46.0" + "@typescript-eslint/visitor-keys": "npm:8.46.0" + debug: "npm:^4.3.4" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/2e8c75b72c7cf170aca554014dbe30e85478d96799a2eb782c4fb61423c0c9e4416e98d6e7903601b1738ea1d0936417dbf61ac0293a0500f56e0eaeefbb2ecd + languageName: node + linkType: hard + +"@typescript-eslint/project-service@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/project-service@npm:8.46.0" + dependencies: + "@typescript-eslint/tsconfig-utils": "npm:^8.46.0" + "@typescript-eslint/types": "npm:^8.46.0" + debug: "npm:^4.3.4" + peerDependencies: + typescript: ">=4.8.4 <6.0.0" + checksum: 
10c0/c3164c795570edfa141917f3099724eca70383b016be1b08f656a491b459d68cf8e2547ac416d75048d3511ca5feaea0586aabad339e3dfe2ae6fddb650d7bc8 + languageName: node + linkType: hard + +"@typescript-eslint/scope-manager@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/scope-manager@npm:8.46.0" + dependencies: + "@typescript-eslint/types": "npm:8.46.0" + "@typescript-eslint/visitor-keys": "npm:8.46.0" + checksum: 10c0/9c242d1edd51247559f99dd8986bdb571db0a2a583a2d02ee8f5f346d265e956f413b442c27e1b02d55ce3944609f6593050ec657be672d9b24b7ed0a359a6ad + languageName: node + linkType: hard + +"@typescript-eslint/tsconfig-utils@npm:8.46.0, @typescript-eslint/tsconfig-utils@npm:^8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/tsconfig-utils@npm:8.46.0" + peerDependencies: + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/306b27c741709f2435dd1c7eabdf552775dff1b3ced01d791c5b9755394ceb3f37c9bcceec92adb6fe60c622523f9d47d9b0d9e515071f47d50527705a4706f7 + languageName: node + linkType: hard + +"@typescript-eslint/type-utils@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/type-utils@npm:8.46.0" + dependencies: + "@typescript-eslint/types": "npm:8.46.0" + "@typescript-eslint/typescript-estree": "npm:8.46.0" + "@typescript-eslint/utils": "npm:8.46.0" + debug: "npm:^4.3.4" + ts-api-utils: "npm:^2.1.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/a0fa4617a998094bc217be1989b76a3e45c058117cda027a723ff6f98f15e5237abfa123284afbdea7f320b4da65e1053ed47c8a211dd012591908a9daa46f02 + languageName: node + linkType: hard + +"@typescript-eslint/types@npm:8.46.0, @typescript-eslint/types@npm:^8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/types@npm:8.46.0" + checksum: 10c0/2f986852139bcbe940b4aafe79bbd28dcca7176e95ba4e3880984ef58c81ad077ca9d9191aad56d2b1df6d16060f5744a96ab3118ddbc9766e5035ed470445c1 + languageName: node + linkType: hard + +"@typescript-eslint/typescript-estree@npm:8.46.0": + version: 
8.46.0 + resolution: "@typescript-eslint/typescript-estree@npm:8.46.0" + dependencies: + "@typescript-eslint/project-service": "npm:8.46.0" + "@typescript-eslint/tsconfig-utils": "npm:8.46.0" + "@typescript-eslint/types": "npm:8.46.0" + "@typescript-eslint/visitor-keys": "npm:8.46.0" + debug: "npm:^4.3.4" + fast-glob: "npm:^3.3.2" + is-glob: "npm:^4.0.3" + minimatch: "npm:^9.0.4" + semver: "npm:^7.6.0" + ts-api-utils: "npm:^2.1.0" + peerDependencies: + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/39aed033dc23c3356e39891c9eba6dde0dc618406f0e13e9adc5967fb81790ec199b1d6eb1144e35ad13a0daaf72157f5f3fc7ac1b7c58d152ade68fe27ad221 + languageName: node + linkType: hard + +"@typescript-eslint/utils@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/utils@npm:8.46.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.7.0" + "@typescript-eslint/scope-manager": "npm:8.46.0" + "@typescript-eslint/types": "npm:8.46.0" + "@typescript-eslint/typescript-estree": "npm:8.46.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/77cc7dff9132d9f02e8766d128edbeb7c2f2b56f9ebdac7308e75a04924e2369857da27b23f0054476c9640609a9707b8dd8ca8b1c59a067e45f65bf5ef4cc1b + languageName: node + linkType: hard + +"@typescript-eslint/visitor-keys@npm:8.46.0": + version: 8.46.0 + resolution: "@typescript-eslint/visitor-keys@npm:8.46.0" + dependencies: + "@typescript-eslint/types": "npm:8.46.0" + eslint-visitor-keys: "npm:^4.2.1" + checksum: 10c0/473dd4861b81238c1df10008b3b6d4684b2fa5ec4f3a8eeb544ea1278a5e2119f839447d16653ea3070164d7e742e3516fe9b0faf16e12a457fa26d5e14a7498 + languageName: node + linkType: hard + +"@volar/language-core@npm:2.4.23, @volar/language-core@npm:~2.4.11": + version: 2.4.23 + resolution: "@volar/language-core@npm:2.4.23" + dependencies: + "@volar/source-map": "npm:2.4.23" + checksum: 10c0/1b8d60c7c0faa29ef5ec46dd2b673227592d0697753767e4df088f7c2d93843828116fe59472bb9d604ba653400be32a538e985730844b1af4f42a7075e62049 
+ languageName: node + linkType: hard + +"@volar/source-map@npm:2.4.23": + version: 2.4.23 + resolution: "@volar/source-map@npm:2.4.23" + checksum: 10c0/08af690093b811d0a37bdd8d306755b4e7f1535b67625c26f6fa6eb9ae081e24c55dabc8231ce8856aa1b731a5ac137b3f0449b34c093923c3545afdbe462c7a + languageName: node + linkType: hard + +"@volar/typescript@npm:^2.4.11": + version: 2.4.23 + resolution: "@volar/typescript@npm:2.4.23" + dependencies: + "@volar/language-core": "npm:2.4.23" + path-browserify: "npm:^1.0.1" + vscode-uri: "npm:^3.0.8" + checksum: 10c0/dbb449b66e627a75f8f6df98b3210c32edff62747a12d1e6237a6dc2a75f26432833d4d3646d6fbd60ed21fa52d7e342437377973b80cf4bbeacee1980ffd0cb + languageName: node + linkType: hard + +"@vue/compiler-core@npm:3.5.22": + version: 3.5.22 + resolution: "@vue/compiler-core@npm:3.5.22" + dependencies: + "@babel/parser": "npm:^7.28.4" + "@vue/shared": "npm:3.5.22" + entities: "npm:^4.5.0" + estree-walker: "npm:^2.0.2" + source-map-js: "npm:^1.2.1" + checksum: 10c0/7575fdef8d2b69aa9a7f55ba237abe0ab86a855dba1048dc32b32e2e5212a66410f922603b1191a8fbbf6e0caee7efab0cea705516304eeb1108d3819a10b092 + languageName: node + linkType: hard + +"@vue/compiler-dom@npm:^3.5.0": + version: 3.5.22 + resolution: "@vue/compiler-dom@npm:3.5.22" + dependencies: + "@vue/compiler-core": "npm:3.5.22" + "@vue/shared": "npm:3.5.22" + checksum: 10c0/f853e7533a6e2f51321b5ce258c6ed2bdac8a294e833a61e87b00d3fdd36cd39e1045c03027c31d85f518422062e50085f1358a37d104ccf0866bc174a5c7b9a + languageName: node + linkType: hard + +"@vue/compiler-vue2@npm:^2.7.16": + version: 2.7.16 + resolution: "@vue/compiler-vue2@npm:2.7.16" + dependencies: + de-indent: "npm:^1.0.2" + he: "npm:^1.2.0" + checksum: 10c0/c76c3fad770b9a7da40b314116cc9da173da20e5fd68785c8ed8dd8a87d02f239545fa296e16552e040ec86b47bfb18283b39447b250c2e76e479bd6ae475bb3 + languageName: node + linkType: hard + +"@vue/language-core@npm:2.2.0": + version: 2.2.0 + resolution: "@vue/language-core@npm:2.2.0" + dependencies: + 
"@volar/language-core": "npm:~2.4.11" + "@vue/compiler-dom": "npm:^3.5.0" + "@vue/compiler-vue2": "npm:^2.7.16" + "@vue/shared": "npm:^3.5.0" + alien-signals: "npm:^0.4.9" + minimatch: "npm:^9.0.3" + muggle-string: "npm:^0.4.1" + path-browserify: "npm:^1.0.1" + peerDependencies: + typescript: "*" + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/1c44cc4067266bbc825af358a867aed455963a08c160cd9df9a47571fd917a87d9de9bdea6149877e0c8309a6cf39f263e7cf2fbadeceba47a5a158f392151b2 + languageName: node + linkType: hard + +"@vue/shared@npm:3.5.22, @vue/shared@npm:^3.5.0": + version: 3.5.22 + resolution: "@vue/shared@npm:3.5.22" + checksum: 10c0/5866eab1dd6caa949f4ae2da2a7bac69612b35e316a298785279fb4de101bfe89a3572db56448aa35023b01d069b80a664be4fe22847ce5e5fbc1990e5970ec5 + languageName: node + linkType: hard + +"abbrev@npm:^3.0.0": + version: 3.0.1 + resolution: "abbrev@npm:3.0.1" + checksum: 10c0/21ba8f574ea57a3106d6d35623f2c4a9111d9ee3e9a5be47baed46ec2457d2eac46e07a5c4a60186f88cb98abbe3e24f2d4cca70bc2b12f1692523e2209a9ccf + languageName: node + linkType: hard + +"acorn-jsx@npm:^5.3.2": + version: 5.3.2 + resolution: "acorn-jsx@npm:5.3.2" + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + checksum: 10c0/4c54868fbef3b8d58927d5e33f0a4de35f59012fe7b12cf9dfbb345fb8f46607709e1c4431be869a23fb63c151033d84c4198fa9f79385cec34fcb1dd53974c1 + languageName: node + linkType: hard + +"acorn-walk@npm:^8.1.1": + version: 8.3.4 + resolution: "acorn-walk@npm:8.3.4" + dependencies: + acorn: "npm:^8.11.0" + checksum: 10c0/76537ac5fb2c37a64560feaf3342023dadc086c46da57da363e64c6148dc21b57d49ace26f949e225063acb6fb441eabffd89f7a3066de5ad37ab3e328927c62 + languageName: node + linkType: hard + +"acorn@npm:^8.11.0, acorn@npm:^8.15.0, acorn@npm:^8.4.1": + version: 8.15.0 + resolution: "acorn@npm:8.15.0" + bin: + acorn: bin/acorn + checksum: 10c0/dec73ff59b7d6628a01eebaece7f2bdb8bb62b9b5926dcad0f8931f2b8b79c2be21f6c68ac095592adb5adb15831a3635d9343e6a91d028bbe85d564875ec3ec + 
languageName: node + linkType: hard + +"agent-base@npm:^7.1.0, agent-base@npm:^7.1.2": + version: 7.1.4 + resolution: "agent-base@npm:7.1.4" + checksum: 10c0/c2c9ab7599692d594b6a161559ada307b7a624fa4c7b03e3afdb5a5e31cd0e53269115b620fcab024c5ac6a6f37fa5eb2e004f076ad30f5f7e6b8b671f7b35fe + languageName: node + linkType: hard + +"ajv-draft-04@npm:~1.0.0": + version: 1.0.0 + resolution: "ajv-draft-04@npm:1.0.0" + peerDependencies: + ajv: ^8.5.0 + peerDependenciesMeta: + ajv: + optional: true + checksum: 10c0/6044310bd38c17d77549fd326bd40ce1506fa10b0794540aa130180808bf94117fac8c9b448c621512bea60e4a947278f6a978e87f10d342950c15b33ddd9271 + languageName: node + linkType: hard + +"ajv-formats@npm:~3.0.1": + version: 3.0.1 + resolution: "ajv-formats@npm:3.0.1" + dependencies: + ajv: "npm:^8.0.0" + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + checksum: 10c0/168d6bca1ea9f163b41c8147bae537e67bd963357a5488a1eaf3abe8baa8eec806d4e45f15b10767e6020679315c7e1e5e6803088dfb84efa2b4e9353b83dd0a + languageName: node + linkType: hard + +"ajv@npm:^6.12.4": + version: 6.12.6 + resolution: "ajv@npm:6.12.6" + dependencies: + fast-deep-equal: "npm:^3.1.1" + fast-json-stable-stringify: "npm:^2.0.0" + json-schema-traverse: "npm:^0.4.1" + uri-js: "npm:^4.2.2" + checksum: 10c0/41e23642cbe545889245b9d2a45854ebba51cda6c778ebced9649420d9205f2efb39cb43dbc41e358409223b1ea43303ae4839db682c848b891e4811da1a5a71 + languageName: node + linkType: hard + +"ajv@npm:^8.0.0": + version: 8.17.1 + resolution: "ajv@npm:8.17.1" + dependencies: + fast-deep-equal: "npm:^3.1.3" + fast-uri: "npm:^3.0.1" + json-schema-traverse: "npm:^1.0.0" + require-from-string: "npm:^2.0.2" + checksum: 10c0/ec3ba10a573c6b60f94639ffc53526275917a2df6810e4ab5a6b959d87459f9ef3f00d5e7865b82677cb7d21590355b34da14d1d0b9c32d75f95a187e76fff35 + languageName: node + linkType: hard + +"ajv@npm:~8.12.0": + version: 8.12.0 + resolution: "ajv@npm:8.12.0" + dependencies: + fast-deep-equal: "npm:^3.1.1" + 
json-schema-traverse: "npm:^1.0.0" + require-from-string: "npm:^2.0.2" + uri-js: "npm:^4.2.2" + checksum: 10c0/ac4f72adf727ee425e049bc9d8b31d4a57e1c90da8d28bcd23d60781b12fcd6fc3d68db5df16994c57b78b94eed7988f5a6b482fd376dc5b084125e20a0a622e + languageName: node + linkType: hard + +"ajv@npm:~8.13.0": + version: 8.13.0 + resolution: "ajv@npm:8.13.0" + dependencies: + fast-deep-equal: "npm:^3.1.3" + json-schema-traverse: "npm:^1.0.0" + require-from-string: "npm:^2.0.2" + uri-js: "npm:^4.4.1" + checksum: 10c0/14c6497b6f72843986d7344175a1aa0e2c35b1e7f7475e55bc582cddb765fca7e6bf950f465dc7846f817776d9541b706f4b5b3fbedd8dfdeb5fce6f22864264 + languageName: node + linkType: hard + +"alien-signals@npm:^0.4.9": + version: 0.4.14 + resolution: "alien-signals@npm:0.4.14" + checksum: 10c0/5abb3377bcaf6b3819e950084b3ebd022ad90210105afb450c89dc347e80e28da441bf34858a57ea122abe7603e552ddbad80dc597c8f02a0a5206c5fb9c20cb + languageName: node + linkType: hard + +"ansi-regex@npm:^5.0.1": + version: 5.0.1 + resolution: "ansi-regex@npm:5.0.1" + checksum: 10c0/9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 + languageName: node + linkType: hard + +"ansi-regex@npm:^6.0.1": + version: 6.2.2 + resolution: "ansi-regex@npm:6.2.2" + checksum: 10c0/05d4acb1d2f59ab2cf4b794339c7b168890d44dda4bf0ce01152a8da0213aca207802f930442ce8cd22d7a92f44907664aac6508904e75e038fa944d2601b30f + languageName: node + linkType: hard + +"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": + version: 4.3.0 + resolution: "ansi-styles@npm:4.3.0" + dependencies: + color-convert: "npm:^2.0.1" + checksum: 10c0/895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 + languageName: node + linkType: hard + +"ansi-styles@npm:^6.1.0": + version: 6.2.3 + resolution: "ansi-styles@npm:6.2.3" + checksum: 
10c0/23b8a4ce14e18fb854693b95351e286b771d23d8844057ed2e7d083cd3e708376c3323707ec6a24365f7d7eda3ca00327fe04092e29e551499ec4c8b7bfac868 + languageName: node + linkType: hard + +"arg@npm:^4.1.0": + version: 4.1.3 + resolution: "arg@npm:4.1.3" + checksum: 10c0/070ff801a9d236a6caa647507bdcc7034530604844d64408149a26b9e87c2f97650055c0f049abd1efc024b334635c01f29e0b632b371ac3f26130f4cf65997a + languageName: node + linkType: hard + +"argparse@npm:^2.0.1": + version: 2.0.1 + resolution: "argparse@npm:2.0.1" + checksum: 10c0/c5640c2d89045371c7cedd6a70212a04e360fd34d6edeae32f6952c63949e3525ea77dbec0289d8213a99bbaeab5abfa860b5c12cf88a2e6cf8106e90dd27a7e + languageName: node + linkType: hard + +"argparse@npm:~1.0.9": + version: 1.0.10 + resolution: "argparse@npm:1.0.10" + dependencies: + sprintf-js: "npm:~1.0.2" + checksum: 10c0/b2972c5c23c63df66bca144dbc65d180efa74f25f8fd9b7d9a0a6c88ae839db32df3d54770dcb6460cf840d232b60695d1a6b1053f599d84e73f7437087712de + languageName: node + linkType: hard + +"array-buffer-byte-length@npm:^1.0.1, array-buffer-byte-length@npm:^1.0.2": + version: 1.0.2 + resolution: "array-buffer-byte-length@npm:1.0.2" + dependencies: + call-bound: "npm:^1.0.3" + is-array-buffer: "npm:^3.0.5" + checksum: 10c0/74e1d2d996941c7a1badda9cabb7caab8c449db9086407cad8a1b71d2604cc8abf105db8ca4e02c04579ec58b7be40279ddb09aea4784832984485499f48432d + languageName: node + linkType: hard + +"array-includes@npm:^3.1.9": + version: 3.1.9 + resolution: "array-includes@npm:3.1.9" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.4" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.24.0" + es-object-atoms: "npm:^1.1.1" + get-intrinsic: "npm:^1.3.0" + is-string: "npm:^1.1.1" + math-intrinsics: "npm:^1.1.0" + checksum: 10c0/0235fa69078abeac05ac4250699c44996bc6f774a9cbe45db48674ce6bd142f09b327d31482ff75cf03344db4ea03eae23edb862d59378b484b47ed842574856 + languageName: node + linkType: hard + +"array.prototype.findlastindex@npm:^1.2.6": + version: 1.2.6 + 
resolution: "array.prototype.findlastindex@npm:1.2.6" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.4" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.9" + es-errors: "npm:^1.3.0" + es-object-atoms: "npm:^1.1.1" + es-shim-unscopables: "npm:^1.1.0" + checksum: 10c0/82559310d2e57ec5f8fc53d7df420e3abf0ba497935de0a5570586035478ba7d07618cb18e2d4ada2da514c8fb98a034aaf5c06caa0a57e2f7f4c4adedef5956 + languageName: node + linkType: hard + +"array.prototype.flat@npm:^1.3.3": + version: 1.3.3 + resolution: "array.prototype.flat@npm:1.3.3" + dependencies: + call-bind: "npm:^1.0.8" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.5" + es-shim-unscopables: "npm:^1.0.2" + checksum: 10c0/d90e04dfbc43bb96b3d2248576753d1fb2298d2d972e29ca7ad5ec621f0d9e16ff8074dae647eac4f31f4fb7d3f561a7ac005fb01a71f51705a13b5af06a7d8a + languageName: node + linkType: hard + +"array.prototype.flatmap@npm:^1.3.3": + version: 1.3.3 + resolution: "array.prototype.flatmap@npm:1.3.3" + dependencies: + call-bind: "npm:^1.0.8" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.5" + es-shim-unscopables: "npm:^1.0.2" + checksum: 10c0/ba899ea22b9dc9bf276e773e98ac84638ed5e0236de06f13d63a90b18ca9e0ec7c97d622d899796e3773930b946cd2413d098656c0c5d8cc58c6f25c21e6bd54 + languageName: node + linkType: hard + +"arraybuffer.prototype.slice@npm:^1.0.4": + version: 1.0.4 + resolution: "arraybuffer.prototype.slice@npm:1.0.4" + dependencies: + array-buffer-byte-length: "npm:^1.0.1" + call-bind: "npm:^1.0.8" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.5" + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.6" + is-array-buffer: "npm:^3.0.4" + checksum: 10c0/2f2459caa06ae0f7f615003f9104b01f6435cc803e11bd2a655107d52a1781dc040532dc44d93026b694cc18793993246237423e13a5337e86b43ed604932c06 + languageName: node + linkType: hard + +"async-function@npm:^1.0.0": + version: 1.0.0 + resolution: "async-function@npm:1.0.0" + checksum: 
10c0/669a32c2cb7e45091330c680e92eaeb791bc1d4132d827591e499cd1f776ff5a873e77e5f92d0ce795a8d60f10761dec9ddfe7225a5de680f5d357f67b1aac73 + languageName: node + linkType: hard + +"async-generator-function@npm:^1.0.0": + version: 1.0.0 + resolution: "async-generator-function@npm:1.0.0" + checksum: 10c0/2c50ef856c543ad500d8d8777d347e3c1ba623b93e99c9263ecc5f965c1b12d2a140e2ab6e43c3d0b85366110696f28114649411cbcd10b452a92a2318394186 + languageName: node + linkType: hard + +"async@npm:^3.2.6": + version: 3.2.6 + resolution: "async@npm:3.2.6" + checksum: 10c0/36484bb15ceddf07078688d95e27076379cc2f87b10c03b6dd8a83e89475a3c8df5848859dd06a4c95af1e4c16fc973de0171a77f18ea00be899aca2a4f85e70 + languageName: node + linkType: hard + +"available-typed-arrays@npm:^1.0.7": + version: 1.0.7 + resolution: "available-typed-arrays@npm:1.0.7" + dependencies: + possible-typed-array-names: "npm:^1.0.0" + checksum: 10c0/d07226ef4f87daa01bd0fe80f8f310982e345f372926da2e5296aecc25c41cab440916bbaa4c5e1034b453af3392f67df5961124e4b586df1e99793a1374bdb2 + languageName: node + linkType: hard + +"balanced-match@npm:^1.0.0": + version: 1.0.2 + resolution: "balanced-match@npm:1.0.2" + checksum: 10c0/9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee + languageName: node + linkType: hard + +"balanced-match@npm:^4.0.2": + version: 4.0.4 + resolution: "balanced-match@npm:4.0.4" + checksum: 10c0/07e86102a3eb2ee2a6a1a89164f29d0dbaebd28f2ca3f5ca786f36b8b23d9e417eb3be45a4acf754f837be5ac0a2317de90d3fcb7f4f4dc95720a1f36b26a17b + languageName: node + linkType: hard + +"basic-auth@npm:^2.0.1": + version: 2.0.1 + resolution: "basic-auth@npm:2.0.1" + dependencies: + safe-buffer: "npm:5.1.2" + checksum: 10c0/05f56db3a0fc31c89c86b605231e32ee143fb6ae38dc60616bc0970ae6a0f034172def99e69d3aed0e2c9e7cac84e2d63bc51a0b5ff6ab5fc8808cc8b29923c1 + languageName: node + linkType: hard + +"before-after-hook@npm:^2.2.0": + version: 2.2.3 + resolution: 
"before-after-hook@npm:2.2.3" + checksum: 10c0/0488c4ae12df758ca9d49b3bb27b47fd559677965c52cae7b335784724fb8bf96c42b6e5ba7d7afcbc31facb0e294c3ef717cc41c5bc2f7bd9e76f8b90acd31c + languageName: node + linkType: hard + +"brace-expansion@npm:^1.1.7": + version: 1.1.12 + resolution: "brace-expansion@npm:1.1.12" + dependencies: + balanced-match: "npm:^1.0.0" + concat-map: "npm:0.0.1" + checksum: 10c0/975fecac2bb7758c062c20d0b3b6288c7cc895219ee25f0a64a9de662dbac981ff0b6e89909c3897c1f84fa353113a721923afdec5f8b2350255b097f12b1f73 + languageName: node + linkType: hard + +"brace-expansion@npm:^2.0.1": + version: 2.0.2 + resolution: "brace-expansion@npm:2.0.2" + dependencies: + balanced-match: "npm:^1.0.0" + checksum: 10c0/6d117a4c793488af86b83172deb6af143e94c17bc53b0b3cec259733923b4ca84679d506ac261f4ba3c7ed37c46018e2ff442f9ce453af8643ecd64f4a54e6cf + languageName: node + linkType: hard + +"brace-expansion@npm:^5.0.2": + version: 5.0.4 + resolution: "brace-expansion@npm:5.0.4" + dependencies: + balanced-match: "npm:^4.0.2" + checksum: 10c0/359cbcfa80b2eb914ca1f3440e92313fbfe7919ee6b274c35db55bec555aded69dac5ee78f102cec90c35f98c20fa43d10936d0cd9978158823c249257e1643a + languageName: node + linkType: hard + +"braces@npm:^3.0.3": + version: 3.0.3 + resolution: "braces@npm:3.0.3" + dependencies: + fill-range: "npm:^7.1.1" + checksum: 10c0/7c6dfd30c338d2997ba77500539227b9d1f85e388a5f43220865201e407e076783d0881f2d297b9f80951b4c957fcf0b51c1d2d24227631643c3f7c284b0aa04 + languageName: node + linkType: hard + +"browser-stdout@npm:^1.3.1": + version: 1.3.1 + resolution: "browser-stdout@npm:1.3.1" + checksum: 10c0/c40e482fd82be872b6ea7b9f7591beafbf6f5ba522fe3dade98ba1573a1c29a11101564993e4eb44e5488be8f44510af072df9a9637c739217eb155ceb639205 + languageName: node + linkType: hard + +"c8@npm:^11.0.0": + version: 11.0.0 + resolution: "c8@npm:11.0.0" + dependencies: + "@bcoe/v8-coverage": "npm:^1.0.1" + "@istanbuljs/schema": "npm:^0.1.3" + find-up: "npm:^5.0.0" + foreground-child: 
"npm:^3.1.1" + istanbul-lib-coverage: "npm:^3.2.0" + istanbul-lib-report: "npm:^3.0.1" + istanbul-reports: "npm:^3.1.6" + test-exclude: "npm:^8.0.0" + v8-to-istanbul: "npm:^9.0.0" + yargs: "npm:^17.7.2" + yargs-parser: "npm:^21.1.1" + peerDependencies: + monocart-coverage-reports: ^2 + peerDependenciesMeta: + monocart-coverage-reports: + optional: true + bin: + c8: bin/c8.js + checksum: 10c0/94b0cf8756715ca8fedb9331c61ebda0c5bbd63c5eeea523d18904af790f6f197a02f547c066fa2d8d0544bb9f9547a6a67d653f3575953139c74ca915771963 + languageName: node + linkType: hard + +"cacache@npm:^19.0.1": + version: 19.0.1 + resolution: "cacache@npm:19.0.1" + dependencies: + "@npmcli/fs": "npm:^4.0.0" + fs-minipass: "npm:^3.0.0" + glob: "npm:^10.2.2" + lru-cache: "npm:^10.0.1" + minipass: "npm:^7.0.3" + minipass-collect: "npm:^2.0.1" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + p-map: "npm:^7.0.2" + ssri: "npm:^12.0.0" + tar: "npm:^7.4.3" + unique-filename: "npm:^4.0.0" + checksum: 10c0/01f2134e1bd7d3ab68be851df96c8d63b492b1853b67f2eecb2c37bb682d37cb70bb858a16f2f0554d3c0071be6dfe21456a1ff6fa4b7eed996570d6a25ffe9c + languageName: node + linkType: hard + +"call-bind-apply-helpers@npm:^1.0.0, call-bind-apply-helpers@npm:^1.0.1, call-bind-apply-helpers@npm:^1.0.2": + version: 1.0.2 + resolution: "call-bind-apply-helpers@npm:1.0.2" + dependencies: + es-errors: "npm:^1.3.0" + function-bind: "npm:^1.1.2" + checksum: 10c0/47bd9901d57b857590431243fea704ff18078b16890a6b3e021e12d279bbf211d039155e27d7566b374d49ee1f8189344bac9833dec7a20cdec370506361c938 + languageName: node + linkType: hard + +"call-bind@npm:^1.0.7, call-bind@npm:^1.0.8": + version: 1.0.8 + resolution: "call-bind@npm:1.0.8" + dependencies: + call-bind-apply-helpers: "npm:^1.0.0" + es-define-property: "npm:^1.0.0" + get-intrinsic: "npm:^1.2.4" + set-function-length: "npm:^1.2.2" + checksum: 
10c0/a13819be0681d915144467741b69875ae5f4eba8961eb0bf322aab63ec87f8250eb6d6b0dcbb2e1349876412a56129ca338592b3829ef4343527f5f18a0752d4 + languageName: node + linkType: hard + +"call-bound@npm:^1.0.2, call-bound@npm:^1.0.3, call-bound@npm:^1.0.4": + version: 1.0.4 + resolution: "call-bound@npm:1.0.4" + dependencies: + call-bind-apply-helpers: "npm:^1.0.2" + get-intrinsic: "npm:^1.3.0" + checksum: 10c0/f4796a6a0941e71c766aea672f63b72bc61234c4f4964dc6d7606e3664c307e7d77845328a8f3359ce39ddb377fed67318f9ee203dea1d47e46165dcf2917644 + languageName: node + linkType: hard + +"call-me-maybe@npm:^1.0.1": + version: 1.0.2 + resolution: "call-me-maybe@npm:1.0.2" + checksum: 10c0/8eff5dbb61141ebb236ed71b4e9549e488bcb5451c48c11e5667d5c75b0532303788a1101e6978cafa2d0c8c1a727805599c2741e3e0982855c9f1d78cd06c9f + languageName: node + linkType: hard + +"callsites@npm:^3.0.0": + version: 3.1.0 + resolution: "callsites@npm:3.1.0" + checksum: 10c0/fff92277400eb06c3079f9e74f3af120db9f8ea03bad0e84d9aede54bbe2d44a56cccb5f6cf12211f93f52306df87077ecec5b712794c5a9b5dac6d615a3f301 + languageName: node + linkType: hard + +"camelcase@npm:^6.0.0": + version: 6.3.0 + resolution: "camelcase@npm:6.3.0" + checksum: 10c0/0d701658219bd3116d12da3eab31acddb3f9440790c0792e0d398f0a520a6a4058018e546862b6fba89d7ae990efaeb97da71e1913e9ebf5a8b5621a3d55c710 + languageName: node + linkType: hard + +"chai@npm:^6.2.0": + version: 6.2.0 + resolution: "chai@npm:6.2.0" + checksum: 10c0/a4b7d7f5907187e09f1847afa838d6d1608adc7d822031b7900813c4ed5d9702911ac2468bf290676f22fddb3d727b1be90b57c1d0a69b902534ee29cdc6ff8a + languageName: node + linkType: hard + +"chalk@npm:4.1.2, chalk@npm:^4.0.0, chalk@npm:^4.1.0, chalk@npm:^4.1.2": + version: 4.1.2 + resolution: "chalk@npm:4.1.2" + dependencies: + ansi-styles: "npm:^4.1.0" + supports-color: "npm:^7.1.0" + checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 + languageName: node + 
linkType: hard + +"chokidar@npm:^4.0.1": + version: 4.0.3 + resolution: "chokidar@npm:4.0.3" + dependencies: + readdirp: "npm:^4.0.1" + checksum: 10c0/a58b9df05bb452f7d105d9e7229ac82fa873741c0c40ddcc7bb82f8a909fbe3f7814c9ebe9bc9a2bef9b737c0ec6e2d699d179048ef06ad3ec46315df0ebe6ad + languageName: node + linkType: hard + +"chownr@npm:^3.0.0": + version: 3.0.0 + resolution: "chownr@npm:3.0.0" + checksum: 10c0/43925b87700f7e3893296c8e9c56cc58f926411cce3a6e5898136daaf08f08b9a8eb76d37d3267e707d0dcc17aed2e2ebdf5848c0c3ce95cf910a919935c1b10 + languageName: node + linkType: hard + +"cliui@npm:^8.0.1": + version: 8.0.1 + resolution: "cliui@npm:8.0.1" + dependencies: + string-width: "npm:^4.2.0" + strip-ansi: "npm:^6.0.1" + wrap-ansi: "npm:^7.0.0" + checksum: 10c0/4bda0f09c340cbb6dfdc1ed508b3ca080f12992c18d68c6be4d9cf51756033d5266e61ec57529e610dacbf4da1c634423b0c1b11037709cc6b09045cbd815df5 + languageName: node + linkType: hard + +"color-convert@npm:^2.0.1": + version: 2.0.1 + resolution: "color-convert@npm:2.0.1" + dependencies: + color-name: "npm:~1.1.4" + checksum: 10c0/37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 + languageName: node + linkType: hard + +"color-name@npm:~1.1.4": + version: 1.1.4 + resolution: "color-name@npm:1.1.4" + checksum: 10c0/a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 + languageName: node + linkType: hard + +"compare-versions@npm:^6.1.1": + version: 6.1.1 + resolution: "compare-versions@npm:6.1.1" + checksum: 10c0/415205c7627f9e4f358f571266422980c9fe2d99086be0c9a48008ef7c771f32b0fbe8e97a441ffedc3910872f917a0675fe0fe3c3b6d331cda6d8690be06338 + languageName: node + linkType: hard + +"concat-map@npm:0.0.1": + version: 0.0.1 + resolution: "concat-map@npm:0.0.1" + checksum: 
10c0/c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f + languageName: node + linkType: hard + +"concurrently@npm:^9.2.1": + version: 9.2.1 + resolution: "concurrently@npm:9.2.1" + dependencies: + chalk: "npm:4.1.2" + rxjs: "npm:7.8.2" + shell-quote: "npm:1.8.3" + supports-color: "npm:8.1.1" + tree-kill: "npm:1.2.2" + yargs: "npm:17.7.2" + bin: + conc: dist/bin/concurrently.js + concurrently: dist/bin/concurrently.js + checksum: 10c0/da37f239f82eb7ac24f5ddb56259861e5f1d6da2ade7602b6ea7ad3101b13b5ccec02a77b7001402d1028ff2fdc38eed55644b32853ad5abf30e057002a963aa + languageName: node + linkType: hard + +"confbox@npm:^0.1.8": + version: 0.1.8 + resolution: "confbox@npm:0.1.8" + checksum: 10c0/fc2c68d97cb54d885b10b63e45bd8da83a8a71459d3ecf1825143dd4c7f9f1b696b3283e07d9d12a144c1301c2ebc7842380bdf0014e55acc4ae1c9550102418 + languageName: node + linkType: hard + +"confbox@npm:^0.2.2": + version: 0.2.2 + resolution: "confbox@npm:0.2.2" + checksum: 10c0/7c246588d533d31e8cdf66cb4701dff6de60f9be77ab54c0d0338e7988750ac56863cc0aca1b3f2046f45ff223a765d3e5d4977a7674485afcd37b6edf3fd129 + languageName: node + linkType: hard + +"convert-source-map@npm:^2.0.0": + version: 2.0.0 + resolution: "convert-source-map@npm:2.0.0" + checksum: 10c0/8f2f7a27a1a011cc6cc88cc4da2d7d0cfa5ee0369508baae3d98c260bb3ac520691464e5bbe4ae7cdf09860c1d69ecc6f70c63c6e7c7f7e3f18ec08484dc7d9b + languageName: node + linkType: hard + +"corser@npm:^2.0.1": + version: 2.0.1 + resolution: "corser@npm:2.0.1" + checksum: 10c0/1f319a752a560342dd22d936e5a4c158bfcbc332524ef5b05a7277236dad8b0b2868fd5cf818559f29954ec4d777d82e797fccd76601fcfe431610e4143c8acc + languageName: node + linkType: hard + +"create-require@npm:^1.1.0": + version: 1.1.1 + resolution: "create-require@npm:1.1.1" + checksum: 10c0/157cbc59b2430ae9a90034a5f3a1b398b6738bf510f713edc4d4e45e169bc514d3d99dd34d8d01ca7ae7830b5b8b537e46ae8f3c8f932371b0875c0151d7ec91 + languageName: 
node + linkType: hard + +"cross-spawn@npm:^7.0.3, cross-spawn@npm:^7.0.6": + version: 7.0.6 + resolution: "cross-spawn@npm:7.0.6" + dependencies: + path-key: "npm:^3.1.0" + shebang-command: "npm:^2.0.0" + which: "npm:^2.0.1" + checksum: 10c0/053ea8b2135caff68a9e81470e845613e374e7309a47731e81639de3eaeb90c3d01af0e0b44d2ab9d50b43467223b88567dfeb3262db942dc063b9976718ffc1 + languageName: node + linkType: hard + +"data-view-buffer@npm:^1.0.2": + version: 1.0.2 + resolution: "data-view-buffer@npm:1.0.2" + dependencies: + call-bound: "npm:^1.0.3" + es-errors: "npm:^1.3.0" + is-data-view: "npm:^1.0.2" + checksum: 10c0/7986d40fc7979e9e6241f85db8d17060dd9a71bd53c894fa29d126061715e322a4cd47a00b0b8c710394854183d4120462b980b8554012acc1c0fa49df7ad38c + languageName: node + linkType: hard + +"data-view-byte-length@npm:^1.0.2": + version: 1.0.2 + resolution: "data-view-byte-length@npm:1.0.2" + dependencies: + call-bound: "npm:^1.0.3" + es-errors: "npm:^1.3.0" + is-data-view: "npm:^1.0.2" + checksum: 10c0/f8a4534b5c69384d95ac18137d381f18a5cfae1f0fc1df0ef6feef51ef0d568606d970b69e02ea186c6c0f0eac77fe4e6ad96fec2569cc86c3afcc7475068c55 + languageName: node + linkType: hard + +"data-view-byte-offset@npm:^1.0.1": + version: 1.0.1 + resolution: "data-view-byte-offset@npm:1.0.1" + dependencies: + call-bound: "npm:^1.0.2" + es-errors: "npm:^1.3.0" + is-data-view: "npm:^1.0.1" + checksum: 10c0/fa7aa40078025b7810dcffc16df02c480573b7b53ef1205aa6a61533011005c1890e5ba17018c692ce7c900212b547262d33279fde801ad9843edc0863bf78c4 + languageName: node + linkType: hard + +"de-indent@npm:^1.0.2": + version: 1.0.2 + resolution: "de-indent@npm:1.0.2" + checksum: 10c0/7058ce58abd6dfc123dd204e36be3797abd419b59482a634605420f47ae97639d0c183ec5d1b904f308a01033f473673897afc2bd59bc620ebf1658763ef4291 + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4, debug@npm:^4.3.5, debug@npm:^4.3.6, debug@npm:^4.4.0": + version: 4.4.3 + resolution: "debug@npm:4.4.3" + 
dependencies: + ms: "npm:^2.1.3" + peerDependenciesMeta: + supports-color: + optional: true + checksum: 10c0/d79136ec6c83ecbefd0f6a5593da6a9c91ec4d7ddc4b54c883d6e71ec9accb5f67a1a5e96d00a328196b5b5c86d365e98d8a3a70856aaf16b4e7b1985e67f5a6 + languageName: node + linkType: hard + +"debug@npm:^3.2.7": + version: 3.2.7 + resolution: "debug@npm:3.2.7" + dependencies: + ms: "npm:^2.1.1" + checksum: 10c0/37d96ae42cbc71c14844d2ae3ba55adf462ec89fd3a999459dec3833944cd999af6007ff29c780f1c61153bcaaf2c842d1e4ce1ec621e4fc4923244942e4a02a + languageName: node + linkType: hard + +"decamelize@npm:^4.0.0": + version: 4.0.0 + resolution: "decamelize@npm:4.0.0" + checksum: 10c0/e06da03fc05333e8cd2778c1487da67ffbea5b84e03ca80449519b8fa61f888714bbc6f459ea963d5641b4aa98832130eb5cd193d90ae9f0a27eee14be8e278d + languageName: node + linkType: hard + +"decode-uri-component@npm:^0.4.1": + version: 0.4.1 + resolution: "decode-uri-component@npm:0.4.1" + checksum: 10c0/a180bbdb5398ec8270d236a3ac07cb988bbf6097428481780b85840f088951dc0318a8d8f9d56796e1a322b55b29859cea29982f22f9b03af0bc60974c54e591 + languageName: node + linkType: hard + +"deep-is@npm:^0.1.3": + version: 0.1.4 + resolution: "deep-is@npm:0.1.4" + checksum: 10c0/7f0ee496e0dff14a573dc6127f14c95061b448b87b995fc96c017ce0a1e66af1675e73f1d6064407975bc4ea6ab679497a29fff7b5b9c4e99cb10797c1ad0b4c + languageName: node + linkType: hard + +"define-data-property@npm:^1.0.1, define-data-property@npm:^1.1.4": + version: 1.1.4 + resolution: "define-data-property@npm:1.1.4" + dependencies: + es-define-property: "npm:^1.0.0" + es-errors: "npm:^1.3.0" + gopd: "npm:^1.0.1" + checksum: 10c0/dea0606d1483eb9db8d930d4eac62ca0fa16738b0b3e07046cddfacf7d8c868bbe13fa0cb263eb91c7d0d527960dc3f2f2471a69ed7816210307f6744fe62e37 + languageName: node + linkType: hard + +"define-properties@npm:^1.2.1": + version: 1.2.1 + resolution: "define-properties@npm:1.2.1" + dependencies: + define-data-property: "npm:^1.0.1" + has-property-descriptors: "npm:^1.0.0" + 
object-keys: "npm:^1.1.1" + checksum: 10c0/88a152319ffe1396ccc6ded510a3896e77efac7a1bfbaa174a7b00414a1747377e0bb525d303794a47cf30e805c2ec84e575758512c6e44a993076d29fd4e6c3 + languageName: node + linkType: hard + +"deprecation@npm:^2.0.0": + version: 2.3.1 + resolution: "deprecation@npm:2.3.1" + checksum: 10c0/23d688ba66b74d09b908c40a76179418acbeeb0bfdf218c8075c58ad8d0c315130cb91aa3dffb623aa3a411a3569ce56c6460de6c8d69071c17fe6dd2442f032 + languageName: node + linkType: hard + +"diff@npm:^4.0.1": + version: 4.0.2 + resolution: "diff@npm:4.0.2" + checksum: 10c0/81b91f9d39c4eaca068eb0c1eb0e4afbdc5bb2941d197f513dd596b820b956fef43485876226d65d497bebc15666aa2aa82c679e84f65d5f2bfbf14ee46e32c1 + languageName: node + linkType: hard + +"diff@npm:^7.0.0": + version: 7.0.0 + resolution: "diff@npm:7.0.0" + checksum: 10c0/251fd15f85ffdf814cfc35a728d526b8d2ad3de338dcbd011ac6e57c461417090766b28995f8ff733135b5fbc3699c392db1d5e27711ac4e00244768cd1d577b + languageName: node + linkType: hard + +"doctrine@npm:^2.1.0": + version: 2.1.0 + resolution: "doctrine@npm:2.1.0" + dependencies: + esutils: "npm:^2.0.2" + checksum: 10c0/b6416aaff1f380bf56c3b552f31fdf7a69b45689368deca72d28636f41c16bb28ec3ebc40ace97db4c1afc0ceeb8120e8492fe0046841c94c2933b2e30a7d5ac + languageName: node + linkType: hard + +"dunder-proto@npm:^1.0.0, dunder-proto@npm:^1.0.1": + version: 1.0.1 + resolution: "dunder-proto@npm:1.0.1" + dependencies: + call-bind-apply-helpers: "npm:^1.0.1" + es-errors: "npm:^1.3.0" + gopd: "npm:^1.2.0" + checksum: 10c0/199f2a0c1c16593ca0a145dbf76a962f8033ce3129f01284d48c45ed4e14fea9bbacd7b3610b6cdc33486cef20385ac054948fefc6272fcce645c09468f93031 + languageName: node + linkType: hard + +"eastasianwidth@npm:^0.2.0": + version: 0.2.0 + resolution: "eastasianwidth@npm:0.2.0" + checksum: 10c0/26f364ebcdb6395f95124fda411f63137a4bfb5d3a06453f7f23dfe52502905bd84e0488172e0f9ec295fdc45f05c23d5d91baf16bd26f0fe9acd777a188dc39 + languageName: node + linkType: hard + +"emoji-regex@npm:^8.0.0": + version: 
8.0.0 + resolution: "emoji-regex@npm:8.0.0" + checksum: 10c0/b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 + languageName: node + linkType: hard + +"emoji-regex@npm:^9.2.2": + version: 9.2.2 + resolution: "emoji-regex@npm:9.2.2" + checksum: 10c0/af014e759a72064cf66e6e694a7fc6b0ed3d8db680427b021a89727689671cefe9d04151b2cad51dbaf85d5ba790d061cd167f1cf32eb7b281f6368b3c181639 + languageName: node + linkType: hard + +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: "npm:^0.6.2" + checksum: 10c0/36d938712ff00fe1f4bac88b43bcffb5930c1efa57bbcdca9d67e1d9d6c57cfb1200fb01efe0f3109b2ce99b231f90779532814a81370a1bd3274a0f58585039 + languageName: node + linkType: hard + +"entities@npm:^4.4.0, entities@npm:^4.5.0": + version: 4.5.0 + resolution: "entities@npm:4.5.0" + checksum: 10c0/5b039739f7621f5d1ad996715e53d964035f75ad3b9a4d38c6b3804bb226e282ffeae2443624d8fdd9c47d8e926ae9ac009c54671243f0c3294c26af7cc85250 + languageName: node + linkType: hard + +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 10c0/285325677bf00e30845e330eec32894f5105529db97496ee3f598478e50f008c5352a41a30e5e72ec9de8a542b5a570b85699cd63bd2bc646dbcb9f311d83bc4 + languageName: node + linkType: hard + +"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: 10c0/b642f7b4dd4a376e954947550a3065a9ece6733ab8e51ad80db727aaae0817c2e99b02a97a3d6cecc648a97848305e728289cf312d09af395403a90c9d4d8a66 + languageName: node + linkType: hard + +"es-abstract@npm:^1.23.2, es-abstract@npm:^1.23.5, es-abstract@npm:^1.23.9, es-abstract@npm:^1.24.0": + version: 1.24.0 + resolution: "es-abstract@npm:1.24.0" + dependencies: + array-buffer-byte-length: "npm:^1.0.2" + arraybuffer.prototype.slice: "npm:^1.0.4" + available-typed-arrays: "npm:^1.0.7" + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.4" + data-view-buffer: 
"npm:^1.0.2" + data-view-byte-length: "npm:^1.0.2" + data-view-byte-offset: "npm:^1.0.1" + es-define-property: "npm:^1.0.1" + es-errors: "npm:^1.3.0" + es-object-atoms: "npm:^1.1.1" + es-set-tostringtag: "npm:^2.1.0" + es-to-primitive: "npm:^1.3.0" + function.prototype.name: "npm:^1.1.8" + get-intrinsic: "npm:^1.3.0" + get-proto: "npm:^1.0.1" + get-symbol-description: "npm:^1.1.0" + globalthis: "npm:^1.0.4" + gopd: "npm:^1.2.0" + has-property-descriptors: "npm:^1.0.2" + has-proto: "npm:^1.2.0" + has-symbols: "npm:^1.1.0" + hasown: "npm:^2.0.2" + internal-slot: "npm:^1.1.0" + is-array-buffer: "npm:^3.0.5" + is-callable: "npm:^1.2.7" + is-data-view: "npm:^1.0.2" + is-negative-zero: "npm:^2.0.3" + is-regex: "npm:^1.2.1" + is-set: "npm:^2.0.3" + is-shared-array-buffer: "npm:^1.0.4" + is-string: "npm:^1.1.1" + is-typed-array: "npm:^1.1.15" + is-weakref: "npm:^1.1.1" + math-intrinsics: "npm:^1.1.0" + object-inspect: "npm:^1.13.4" + object-keys: "npm:^1.1.1" + object.assign: "npm:^4.1.7" + own-keys: "npm:^1.0.1" + regexp.prototype.flags: "npm:^1.5.4" + safe-array-concat: "npm:^1.1.3" + safe-push-apply: "npm:^1.0.0" + safe-regex-test: "npm:^1.1.0" + set-proto: "npm:^1.0.0" + stop-iteration-iterator: "npm:^1.1.0" + string.prototype.trim: "npm:^1.2.10" + string.prototype.trimend: "npm:^1.0.9" + string.prototype.trimstart: "npm:^1.0.8" + typed-array-buffer: "npm:^1.0.3" + typed-array-byte-length: "npm:^1.0.3" + typed-array-byte-offset: "npm:^1.0.4" + typed-array-length: "npm:^1.0.7" + unbox-primitive: "npm:^1.1.0" + which-typed-array: "npm:^1.1.19" + checksum: 10c0/b256e897be32df5d382786ce8cce29a1dd8c97efbab77a26609bd70f2ed29fbcfc7a31758cb07488d532e7ccccdfca76c1118f2afe5a424cdc05ca007867c318 + languageName: node + linkType: hard + +"es-define-property@npm:^1.0.0, es-define-property@npm:^1.0.1": + version: 1.0.1 + resolution: "es-define-property@npm:1.0.1" + checksum: 
10c0/3f54eb49c16c18707949ff25a1456728c883e81259f045003499efba399c08bad00deebf65cccde8c0e07908c1a225c9d472b7107e558f2a48e28d530e34527c + languageName: node + linkType: hard + +"es-errors@npm:^1.3.0": + version: 1.3.0 + resolution: "es-errors@npm:1.3.0" + checksum: 10c0/0a61325670072f98d8ae3b914edab3559b6caa980f08054a3b872052640d91da01d38df55df797fcc916389d77fc92b8d5906cf028f4db46d7e3003abecbca85 + languageName: node + linkType: hard + +"es-object-atoms@npm:^1.0.0, es-object-atoms@npm:^1.1.1": + version: 1.1.1 + resolution: "es-object-atoms@npm:1.1.1" + dependencies: + es-errors: "npm:^1.3.0" + checksum: 10c0/65364812ca4daf48eb76e2a3b7a89b3f6a2e62a1c420766ce9f692665a29d94fe41fe88b65f24106f449859549711e4b40d9fb8002d862dfd7eb1c512d10be0c + languageName: node + linkType: hard + +"es-set-tostringtag@npm:^2.1.0": + version: 2.1.0 + resolution: "es-set-tostringtag@npm:2.1.0" + dependencies: + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.6" + has-tostringtag: "npm:^1.0.2" + hasown: "npm:^2.0.2" + checksum: 10c0/ef2ca9ce49afe3931cb32e35da4dcb6d86ab02592cfc2ce3e49ced199d9d0bb5085fc7e73e06312213765f5efa47cc1df553a6a5154584b21448e9fb8355b1af + languageName: node + linkType: hard + +"es-shim-unscopables@npm:^1.0.2, es-shim-unscopables@npm:^1.1.0": + version: 1.1.0 + resolution: "es-shim-unscopables@npm:1.1.0" + dependencies: + hasown: "npm:^2.0.2" + checksum: 10c0/1b9702c8a1823fc3ef39035a4e958802cf294dd21e917397c561d0b3e195f383b978359816b1732d02b255ccf63e1e4815da0065b95db8d7c992037be3bbbcdb + languageName: node + linkType: hard + +"es-to-primitive@npm:^1.3.0": + version: 1.3.0 + resolution: "es-to-primitive@npm:1.3.0" + dependencies: + is-callable: "npm:^1.2.7" + is-date-object: "npm:^1.0.5" + is-symbol: "npm:^1.0.4" + checksum: 10c0/c7e87467abb0b438639baa8139f701a06537d2b9bc758f23e8622c3b42fd0fdb5bde0f535686119e446dd9d5e4c0f238af4e14960f4771877cf818d023f6730b + languageName: node + linkType: hard + +"esbuild@npm:^0.25.0, esbuild@npm:~0.25.0": + version: 0.25.10 + 
resolution: "esbuild@npm:0.25.10" + dependencies: + "@esbuild/aix-ppc64": "npm:0.25.10" + "@esbuild/android-arm": "npm:0.25.10" + "@esbuild/android-arm64": "npm:0.25.10" + "@esbuild/android-x64": "npm:0.25.10" + "@esbuild/darwin-arm64": "npm:0.25.10" + "@esbuild/darwin-x64": "npm:0.25.10" + "@esbuild/freebsd-arm64": "npm:0.25.10" + "@esbuild/freebsd-x64": "npm:0.25.10" + "@esbuild/linux-arm": "npm:0.25.10" + "@esbuild/linux-arm64": "npm:0.25.10" + "@esbuild/linux-ia32": "npm:0.25.10" + "@esbuild/linux-loong64": "npm:0.25.10" + "@esbuild/linux-mips64el": "npm:0.25.10" + "@esbuild/linux-ppc64": "npm:0.25.10" + "@esbuild/linux-riscv64": "npm:0.25.10" + "@esbuild/linux-s390x": "npm:0.25.10" + "@esbuild/linux-x64": "npm:0.25.10" + "@esbuild/netbsd-arm64": "npm:0.25.10" + "@esbuild/netbsd-x64": "npm:0.25.10" + "@esbuild/openbsd-arm64": "npm:0.25.10" + "@esbuild/openbsd-x64": "npm:0.25.10" + "@esbuild/openharmony-arm64": "npm:0.25.10" + "@esbuild/sunos-x64": "npm:0.25.10" + "@esbuild/win32-arm64": "npm:0.25.10" + "@esbuild/win32-ia32": "npm:0.25.10" + "@esbuild/win32-x64": "npm:0.25.10" + dependenciesMeta: + "@esbuild/aix-ppc64": + optional: true + "@esbuild/android-arm": + optional: true + "@esbuild/android-arm64": + optional: true + "@esbuild/android-x64": + optional: true + "@esbuild/darwin-arm64": + optional: true + "@esbuild/darwin-x64": + optional: true + "@esbuild/freebsd-arm64": + optional: true + "@esbuild/freebsd-x64": + optional: true + "@esbuild/linux-arm": + optional: true + "@esbuild/linux-arm64": + optional: true + "@esbuild/linux-ia32": + optional: true + "@esbuild/linux-loong64": + optional: true + "@esbuild/linux-mips64el": + optional: true + "@esbuild/linux-ppc64": + optional: true + "@esbuild/linux-riscv64": + optional: true + "@esbuild/linux-s390x": + optional: true + "@esbuild/linux-x64": + optional: true + "@esbuild/netbsd-arm64": + optional: true + "@esbuild/netbsd-x64": + optional: true + "@esbuild/openbsd-arm64": + optional: true + 
"@esbuild/openbsd-x64": + optional: true + "@esbuild/openharmony-arm64": + optional: true + "@esbuild/sunos-x64": + optional: true + "@esbuild/win32-arm64": + optional: true + "@esbuild/win32-ia32": + optional: true + "@esbuild/win32-x64": + optional: true + bin: + esbuild: bin/esbuild + checksum: 10c0/8ee5fdd43ed0d4092ce7f41577c63147f54049d5617763f0549c638bbe939e8adaa8f1a2728adb63417eb11df51956b7b0d8eb88ee08c27ad1d42960256158fa + languageName: node + linkType: hard + +"escalade@npm:^3.1.1": + version: 3.2.0 + resolution: "escalade@npm:3.2.0" + checksum: 10c0/ced4dd3a78e15897ed3be74e635110bbf3b08877b0a41be50dcb325ee0e0b5f65fc2d50e9845194d7c4633f327e2e1c6cce00a71b617c5673df0374201d67f65 + languageName: node + linkType: hard + +"escape-string-regexp@npm:^4.0.0": + version: 4.0.0 + resolution: "escape-string-regexp@npm:4.0.0" + checksum: 10c0/9497d4dd307d845bd7f75180d8188bb17ea8c151c1edbf6b6717c100e104d629dc2dfb687686181b0f4b7d732c7dfdc4d5e7a8ff72de1b0ca283a75bbb3a9cd9 + languageName: node + linkType: hard + +"eslint-import-resolver-node@npm:^0.3.9": + version: 0.3.9 + resolution: "eslint-import-resolver-node@npm:0.3.9" + dependencies: + debug: "npm:^3.2.7" + is-core-module: "npm:^2.13.0" + resolve: "npm:^1.22.4" + checksum: 10c0/0ea8a24a72328a51fd95aa8f660dcca74c1429806737cf10261ab90cfcaaf62fd1eff664b76a44270868e0a932711a81b250053942595bcd00a93b1c1575dd61 + languageName: node + linkType: hard + +"eslint-module-utils@npm:^2.12.1": + version: 2.12.1 + resolution: "eslint-module-utils@npm:2.12.1" + dependencies: + debug: "npm:^3.2.7" + peerDependenciesMeta: + eslint: + optional: true + checksum: 10c0/6f4efbe7a91ae49bf67b4ab3644cb60bc5bd7db4cb5521de1b65be0847ffd3fb6bce0dd68f0995e1b312d137f768e2a1f842ee26fe73621afa05f850628fdc40 + languageName: node + linkType: hard + +"eslint-plugin-import@npm:^2.32.0": + version: 2.32.0 + resolution: "eslint-plugin-import@npm:2.32.0" + dependencies: + "@rtsao/scc": "npm:^1.1.0" + array-includes: "npm:^3.1.9" + 
array.prototype.findlastindex: "npm:^1.2.6" + array.prototype.flat: "npm:^1.3.3" + array.prototype.flatmap: "npm:^1.3.3" + debug: "npm:^3.2.7" + doctrine: "npm:^2.1.0" + eslint-import-resolver-node: "npm:^0.3.9" + eslint-module-utils: "npm:^2.12.1" + hasown: "npm:^2.0.2" + is-core-module: "npm:^2.16.1" + is-glob: "npm:^4.0.3" + minimatch: "npm:^3.1.2" + object.fromentries: "npm:^2.0.8" + object.groupby: "npm:^1.0.3" + object.values: "npm:^1.2.1" + semver: "npm:^6.3.1" + string.prototype.trimend: "npm:^1.0.9" + tsconfig-paths: "npm:^3.15.0" + peerDependencies: + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9 + checksum: 10c0/bfb1b8fc8800398e62ddfefbf3638d185286edfed26dfe00875cc2846d954491b4f5112457831588b757fa789384e1ae585f812614c4797f0499fa234fd4a48b + languageName: node + linkType: hard + +"eslint-scope@npm:^8.4.0": + version: 8.4.0 + resolution: "eslint-scope@npm:8.4.0" + dependencies: + esrecurse: "npm:^4.3.0" + estraverse: "npm:^5.2.0" + checksum: 10c0/407f6c600204d0f3705bd557f81bd0189e69cd7996f408f8971ab5779c0af733d1af2f1412066b40ee1588b085874fc37a2333986c6521669cdbdd36ca5058e0 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^3.4.3": + version: 3.4.3 + resolution: "eslint-visitor-keys@npm:3.4.3" + checksum: 10c0/92708e882c0a5ffd88c23c0b404ac1628cf20104a108c745f240a13c332a11aac54f49a22d5762efbffc18ecbc9a580d1b7ad034bf5f3cc3307e5cbff2ec9820 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^4.2.1": + version: 4.2.1 + resolution: "eslint-visitor-keys@npm:4.2.1" + checksum: 10c0/fcd43999199d6740db26c58dbe0c2594623e31ca307e616ac05153c9272f12f1364f5a0b1917a8e962268fdecc6f3622c1c2908b4fcc2e047a106fe6de69dc43 + languageName: node + linkType: hard + +"eslint@npm:^9.37.0": + version: 9.37.0 + resolution: "eslint@npm:9.37.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.8.0" + "@eslint-community/regexpp": "npm:^4.12.1" + "@eslint/config-array": "npm:^0.21.0" + "@eslint/config-helpers": "npm:^0.4.0" + 
"@eslint/core": "npm:^0.16.0" + "@eslint/eslintrc": "npm:^3.3.1" + "@eslint/js": "npm:9.37.0" + "@eslint/plugin-kit": "npm:^0.4.0" + "@humanfs/node": "npm:^0.16.6" + "@humanwhocodes/module-importer": "npm:^1.0.1" + "@humanwhocodes/retry": "npm:^0.4.2" + "@types/estree": "npm:^1.0.6" + "@types/json-schema": "npm:^7.0.15" + ajv: "npm:^6.12.4" + chalk: "npm:^4.0.0" + cross-spawn: "npm:^7.0.6" + debug: "npm:^4.3.2" + escape-string-regexp: "npm:^4.0.0" + eslint-scope: "npm:^8.4.0" + eslint-visitor-keys: "npm:^4.2.1" + espree: "npm:^10.4.0" + esquery: "npm:^1.5.0" + esutils: "npm:^2.0.2" + fast-deep-equal: "npm:^3.1.3" + file-entry-cache: "npm:^8.0.0" + find-up: "npm:^5.0.0" + glob-parent: "npm:^6.0.2" + ignore: "npm:^5.2.0" + imurmurhash: "npm:^0.1.4" + is-glob: "npm:^4.0.0" + json-stable-stringify-without-jsonify: "npm:^1.0.1" + lodash.merge: "npm:^4.6.2" + minimatch: "npm:^3.1.2" + natural-compare: "npm:^1.4.0" + optionator: "npm:^0.9.3" + peerDependencies: + jiti: "*" + peerDependenciesMeta: + jiti: + optional: true + bin: + eslint: bin/eslint.js + checksum: 10c0/30b71350b0e43542eeffa6f7380ed85c960055dde8003f17bf87d209a4a9afc6091bc0419aa32f86853e7ecef18790bdc4d678112b89dbebe61b69efcb1100e1 + languageName: node + linkType: hard + +"espree@npm:^10.0.1, espree@npm:^10.4.0": + version: 10.4.0 + resolution: "espree@npm:10.4.0" + dependencies: + acorn: "npm:^8.15.0" + acorn-jsx: "npm:^5.3.2" + eslint-visitor-keys: "npm:^4.2.1" + checksum: 10c0/c63fe06131c26c8157b4083313cb02a9a54720a08e21543300e55288c40e06c3fc284bdecf108d3a1372c5934a0a88644c98714f38b6ae8ed272b40d9ea08d6b + languageName: node + linkType: hard + +"esquery@npm:^1.5.0": + version: 1.6.0 + resolution: "esquery@npm:1.6.0" + dependencies: + estraverse: "npm:^5.1.0" + checksum: 10c0/cb9065ec605f9da7a76ca6dadb0619dfb611e37a81e318732977d90fab50a256b95fee2d925fba7c2f3f0523aa16f91587246693bc09bc34d5a59575fe6e93d2 + languageName: node + linkType: hard + +"esrecurse@npm:^4.3.0": + version: 4.3.0 + resolution: 
"esrecurse@npm:4.3.0" + dependencies: + estraverse: "npm:^5.2.0" + checksum: 10c0/81a37116d1408ded88ada45b9fb16dbd26fba3aadc369ce50fcaf82a0bac12772ebd7b24cd7b91fc66786bf2c1ac7b5f196bc990a473efff972f5cb338877cf5 + languageName: node + linkType: hard + +"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0": + version: 5.3.0 + resolution: "estraverse@npm:5.3.0" + checksum: 10c0/1ff9447b96263dec95d6d67431c5e0771eb9776427421260a3e2f0fdd5d6bd4f8e37a7338f5ad2880c9f143450c9b1e4fc2069060724570a49cf9cf0312bd107 + languageName: node + linkType: hard + +"estree-walker@npm:^2.0.2": + version: 2.0.2 + resolution: "estree-walker@npm:2.0.2" + checksum: 10c0/53a6c54e2019b8c914dc395890153ffdc2322781acf4bd7d1a32d7aedc1710807bdcd866ac133903d5629ec601fbb50abe8c2e5553c7f5a0afdd9b6af6c945af + languageName: node + linkType: hard + +"esutils@npm:^2.0.2": + version: 2.0.3 + resolution: "esutils@npm:2.0.3" + checksum: 10c0/9a2fe69a41bfdade834ba7c42de4723c97ec776e40656919c62cbd13607c45e127a003f05f724a1ea55e5029a4cf2de444b13009f2af71271e42d93a637137c7 + languageName: node + linkType: hard + +"eventemitter3@npm:^4.0.0": + version: 4.0.7 + resolution: "eventemitter3@npm:4.0.7" + checksum: 10c0/5f6d97cbcbac47be798e6355e3a7639a84ee1f7d9b199a07017f1d2f1e2fe236004d14fa5dfaeba661f94ea57805385e326236a6debbc7145c8877fbc0297c6b + languageName: node + linkType: hard + +"exponential-backoff@npm:^3.1.1": + version: 3.1.2 + resolution: "exponential-backoff@npm:3.1.2" + checksum: 10c0/d9d3e1eafa21b78464297df91f1776f7fbaa3d5e3f7f0995648ca5b89c069d17055033817348d9f4a43d1c20b0eab84f75af6991751e839df53e4dfd6f22e844 + languageName: node + linkType: hard + +"exsolve@npm:^1.0.7": + version: 1.0.7 + resolution: "exsolve@npm:1.0.7" + checksum: 10c0/4479369d0bd84bb7e0b4f5d9bc18d26a89b6dbbbccd73f9d383d14892ef78ddbe159e01781055342f83dc00ebe90044036daf17ddf55cc21e2cac6609aa15631 + languageName: node + linkType: hard + +"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": + version: 3.1.3 + resolution: 
"fast-deep-equal@npm:3.1.3" + checksum: 10c0/40dedc862eb8992c54579c66d914635afbec43350afbbe991235fdcb4e3a8d5af1b23ae7e79bef7d4882d0ecee06c3197488026998fb19f72dc95acff1d1b1d0 + languageName: node + linkType: hard + +"fast-glob@npm:^3.3.2": + version: 3.3.3 + resolution: "fast-glob@npm:3.3.3" + dependencies: + "@nodelib/fs.stat": "npm:^2.0.2" + "@nodelib/fs.walk": "npm:^1.2.3" + glob-parent: "npm:^5.1.2" + merge2: "npm:^1.3.0" + micromatch: "npm:^4.0.8" + checksum: 10c0/f6aaa141d0d3384cf73cbcdfc52f475ed293f6d5b65bfc5def368b09163a9f7e5ec2b3014d80f733c405f58e470ee0cc451c2937685045cddcdeaa24199c43fe + languageName: node + linkType: hard + +"fast-json-stable-stringify@npm:^2.0.0": + version: 2.1.0 + resolution: "fast-json-stable-stringify@npm:2.1.0" + checksum: 10c0/7f081eb0b8a64e0057b3bb03f974b3ef00135fbf36c1c710895cd9300f13c94ba809bb3a81cf4e1b03f6e5285610a61abbd7602d0652de423144dfee5a389c9b + languageName: node + linkType: hard + +"fast-levenshtein@npm:^2.0.6": + version: 2.0.6 + resolution: "fast-levenshtein@npm:2.0.6" + checksum: 10c0/111972b37338bcb88f7d9e2c5907862c280ebf4234433b95bc611e518d192ccb2d38119c4ac86e26b668d75f7f3894f4ff5c4982899afced7ca78633b08287c4 + languageName: node + linkType: hard + +"fast-uri@npm:^3.0.1": + version: 3.1.0 + resolution: "fast-uri@npm:3.1.0" + checksum: 10c0/44364adca566f70f40d1e9b772c923138d47efeac2ae9732a872baafd77061f26b097ba2f68f0892885ad177becd065520412b8ffeec34b16c99433c5b9e2de7 + languageName: node + linkType: hard + +"fastq@npm:^1.6.0": + version: 1.19.1 + resolution: "fastq@npm:1.19.1" + dependencies: + reusify: "npm:^1.0.4" + checksum: 10c0/ebc6e50ac7048daaeb8e64522a1ea7a26e92b3cee5cd1c7f2316cdca81ba543aa40a136b53891446ea5c3a67ec215fbaca87ad405f102dd97012f62916905630 + languageName: node + linkType: hard + +"fdir@npm:^6.5.0": + version: 6.5.0 + resolution: "fdir@npm:6.5.0" + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + checksum: 
10c0/e345083c4306b3aed6cb8ec551e26c36bab5c511e99ea4576a16750ddc8d3240e63826cc624f5ae17ad4dc82e68a253213b60d556c11bfad064b7607847ed07f + languageName: node + linkType: hard + +"file-entry-cache@npm:^8.0.0": + version: 8.0.0 + resolution: "file-entry-cache@npm:8.0.0" + dependencies: + flat-cache: "npm:^4.0.0" + checksum: 10c0/9e2b5938b1cd9b6d7e3612bdc533afd4ac17b2fc646569e9a8abbf2eb48e5eb8e316bc38815a3ef6a1b456f4107f0d0f055a614ca613e75db6bf9ff4d72c1638 + languageName: node + linkType: hard + +"fill-range@npm:^7.1.1": + version: 7.1.1 + resolution: "fill-range@npm:7.1.1" + dependencies: + to-regex-range: "npm:^5.0.1" + checksum: 10c0/b75b691bbe065472f38824f694c2f7449d7f5004aa950426a2c28f0306c60db9b880c0b0e4ed819997ffb882d1da02cfcfc819bddc94d71627f5269682edf018 + languageName: node + linkType: hard + +"filter-obj@npm:^5.1.0": + version: 5.1.0 + resolution: "filter-obj@npm:5.1.0" + checksum: 10c0/716e8ad2bc352e206556b3e5695b3cdff8aab80c53ea4b00c96315bbf467b987df3640575100aef8b84e812cf5ea4251db4cd672bbe33b1e78afea88400c67dd + languageName: node + linkType: hard + +"find-up@npm:^5.0.0": + version: 5.0.0 + resolution: "find-up@npm:5.0.0" + dependencies: + locate-path: "npm:^6.0.0" + path-exists: "npm:^4.0.0" + checksum: 10c0/062c5a83a9c02f53cdd6d175a37ecf8f87ea5bbff1fdfb828f04bfa021441bc7583e8ebc0872a4c1baab96221fb8a8a275a19809fb93fbc40bd69ec35634069a + languageName: node + linkType: hard + +"flat-cache@npm:^4.0.0": + version: 4.0.1 + resolution: "flat-cache@npm:4.0.1" + dependencies: + flatted: "npm:^3.2.9" + keyv: "npm:^4.5.4" + checksum: 10c0/2c59d93e9faa2523e4fda6b4ada749bed432cfa28c8e251f33b25795e426a1c6dbada777afb1f74fcfff33934fdbdea921ee738fcc33e71adc9d6eca984a1cfc + languageName: node + linkType: hard + +"flat@npm:^5.0.2": + version: 5.0.2 + resolution: "flat@npm:5.0.2" + bin: + flat: cli.js + checksum: 10c0/f178b13482f0cd80c7fede05f4d10585b1f2fdebf26e12edc138e32d3150c6ea6482b7f12813a1091143bad52bb6d3596bca51a162257a21163c0ff438baa5fe + languageName: node + 
linkType: hard + +"flatted@npm:^3.2.9": + version: 3.3.3 + resolution: "flatted@npm:3.3.3" + checksum: 10c0/e957a1c6b0254aa15b8cce8533e24165abd98fadc98575db082b786b5da1b7d72062b81bfdcd1da2f4d46b6ed93bec2434e62333e9b4261d79ef2e75a10dd538 + languageName: node + linkType: hard + +"follow-redirects@npm:^1.0.0": + version: 1.15.11 + resolution: "follow-redirects@npm:1.15.11" + peerDependenciesMeta: + debug: + optional: true + checksum: 10c0/d301f430542520a54058d4aeeb453233c564aaccac835d29d15e050beb33f339ad67d9bddbce01739c5dc46a6716dbe3d9d0d5134b1ca203effa11a7ef092343 + languageName: node + linkType: hard + +"for-each@npm:^0.3.3, for-each@npm:^0.3.5": + version: 0.3.5 + resolution: "for-each@npm:0.3.5" + dependencies: + is-callable: "npm:^1.2.7" + checksum: 10c0/0e0b50f6a843a282637d43674d1fb278dda1dd85f4f99b640024cfb10b85058aac0cc781bf689d5fe50b4b7f638e91e548560723a4e76e04fe96ae35ef039cee + languageName: node + linkType: hard + +"foreground-child@npm:^3.1.0, foreground-child@npm:^3.1.1, foreground-child@npm:^3.3.1": + version: 3.3.1 + resolution: "foreground-child@npm:3.3.1" + dependencies: + cross-spawn: "npm:^7.0.6" + signal-exit: "npm:^4.0.1" + checksum: 10c0/8986e4af2430896e65bc2788d6679067294d6aee9545daefc84923a0a4b399ad9c7a3ea7bd8c0b2b80fdf4a92de4c69df3f628233ff3224260e9c1541a9e9ed3 + languageName: node + linkType: hard + +"fs-extra@npm:~11.3.0": + version: 11.3.2 + resolution: "fs-extra@npm:11.3.2" + dependencies: + graceful-fs: "npm:^4.2.0" + jsonfile: "npm:^6.0.1" + universalify: "npm:^2.0.0" + checksum: 10c0/f5d629e1bb646d5dedb4d8b24c5aad3deb8cc1d5438979d6f237146cd10e113b49a949ae1b54212c2fbc98e2d0995f38009a9a1d0520f0287943335e65fe919b + languageName: node + linkType: hard + +"fs-minipass@npm:^3.0.0": + version: 3.0.3 + resolution: "fs-minipass@npm:3.0.3" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/63e80da2ff9b621e2cb1596abcb9207f1cf82b968b116ccd7b959e3323144cce7fb141462200971c38bbf2ecca51695069db45265705bed09a7cd93ae5b89f94 + languageName: node + 
linkType: hard + +"fsevents@npm:~2.3.2, fsevents@npm:~2.3.3": + version: 2.3.3 + resolution: "fsevents@npm:2.3.3" + dependencies: + node-gyp: "npm:latest" + checksum: 10c0/a1f0c44595123ed717febbc478aa952e47adfc28e2092be66b8ab1635147254ca6cfe1df792a8997f22716d4cbafc73309899ff7bfac2ac3ad8cf2e4ecc3ec60 + conditions: os=darwin + languageName: node + linkType: hard + +"fsevents@patch:fsevents@npm%3A~2.3.2#optional!builtin, fsevents@patch:fsevents@npm%3A~2.3.3#optional!builtin": + version: 2.3.3 + resolution: "fsevents@patch:fsevents@npm%3A2.3.3#optional!builtin::version=2.3.3&hash=df0bf1" + dependencies: + node-gyp: "npm:latest" + conditions: os=darwin + languageName: node + linkType: hard + +"function-bind@npm:^1.1.2": + version: 1.1.2 + resolution: "function-bind@npm:1.1.2" + checksum: 10c0/d8680ee1e5fcd4c197e4ac33b2b4dce03c71f4d91717292785703db200f5c21f977c568d28061226f9b5900cbcd2c84463646134fd5337e7925e0942bc3f46d5 + languageName: node + linkType: hard + +"function.prototype.name@npm:^1.1.6, function.prototype.name@npm:^1.1.8": + version: 1.1.8 + resolution: "function.prototype.name@npm:1.1.8" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.3" + define-properties: "npm:^1.2.1" + functions-have-names: "npm:^1.2.3" + hasown: "npm:^2.0.2" + is-callable: "npm:^1.2.7" + checksum: 10c0/e920a2ab52663005f3cbe7ee3373e3c71c1fb5558b0b0548648cdf3e51961085032458e26c71ff1a8c8c20e7ee7caeb03d43a5d1fa8610c459333323a2e71253 + languageName: node + linkType: hard + +"functions-have-names@npm:^1.2.3": + version: 1.2.3 + resolution: "functions-have-names@npm:1.2.3" + checksum: 10c0/33e77fd29bddc2d9bb78ab3eb854c165909201f88c75faa8272e35899e2d35a8a642a15e7420ef945e1f64a9670d6aa3ec744106b2aa42be68ca5114025954ca + languageName: node + linkType: hard + +"generator-function@npm:^2.0.0": + version: 2.0.1 + resolution: "generator-function@npm:2.0.1" + checksum: 
10c0/8a9f59df0f01cfefafdb3b451b80555e5cf6d76487095db91ac461a0e682e4ff7a9dbce15f4ecec191e53586d59eece01949e05a4b4492879600bbbe8e28d6b8 + languageName: node + linkType: hard + +"get-caller-file@npm:^2.0.5": + version: 2.0.5 + resolution: "get-caller-file@npm:2.0.5" + checksum: 10c0/c6c7b60271931fa752aeb92f2b47e355eac1af3a2673f47c9589e8f8a41adc74d45551c1bc57b5e66a80609f10ffb72b6f575e4370d61cc3f7f3aaff01757cde + languageName: node + linkType: hard + +"get-intrinsic@npm:^1.2.4, get-intrinsic@npm:^1.2.5, get-intrinsic@npm:^1.2.6, get-intrinsic@npm:^1.2.7, get-intrinsic@npm:^1.3.0": + version: 1.3.1 + resolution: "get-intrinsic@npm:1.3.1" + dependencies: + async-function: "npm:^1.0.0" + async-generator-function: "npm:^1.0.0" + call-bind-apply-helpers: "npm:^1.0.2" + es-define-property: "npm:^1.0.1" + es-errors: "npm:^1.3.0" + es-object-atoms: "npm:^1.1.1" + function-bind: "npm:^1.1.2" + generator-function: "npm:^2.0.0" + get-proto: "npm:^1.0.1" + gopd: "npm:^1.2.0" + has-symbols: "npm:^1.1.0" + hasown: "npm:^2.0.2" + math-intrinsics: "npm:^1.1.0" + checksum: 10c0/9f4ab0cf7efe0fd2c8185f52e6f637e708f3a112610c88869f8f041bb9ecc2ce44bf285dfdbdc6f4f7c277a5b88d8e94a432374d97cca22f3de7fc63795deb5d + languageName: node + linkType: hard + +"get-proto@npm:^1.0.1": + version: 1.0.1 + resolution: "get-proto@npm:1.0.1" + dependencies: + dunder-proto: "npm:^1.0.1" + es-object-atoms: "npm:^1.0.0" + checksum: 10c0/9224acb44603c5526955e83510b9da41baf6ae73f7398875fba50edc5e944223a89c4a72b070fcd78beb5f7bdda58ecb6294adc28f7acfc0da05f76a2399643c + languageName: node + linkType: hard + +"get-symbol-description@npm:^1.1.0": + version: 1.1.0 + resolution: "get-symbol-description@npm:1.1.0" + dependencies: + call-bound: "npm:^1.0.3" + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.6" + checksum: 10c0/d6a7d6afca375779a4b307738c9e80dbf7afc0bdbe5948768d54ab9653c865523d8920e670991a925936eb524b7cb6a6361d199a760b21d0ca7620194455aa4b + languageName: node + linkType: hard + +"get-tsconfig@npm:^4.7.5": 
+ version: 4.12.0 + resolution: "get-tsconfig@npm:4.12.0" + dependencies: + resolve-pkg-maps: "npm:^1.0.0" + checksum: 10c0/3438106bd46bfc6595fce6117190f1ac0998de2e6916b40ec23b20c784b0b47e79ea2b920895b9ed26029b1f80b8867626fb24795d5f45abbdab716a4ba1ef92 + languageName: node + linkType: hard + +"glob-parent@npm:^5.1.2": + version: 5.1.2 + resolution: "glob-parent@npm:5.1.2" + dependencies: + is-glob: "npm:^4.0.1" + checksum: 10c0/cab87638e2112bee3f839ef5f6e0765057163d39c66be8ec1602f3823da4692297ad4e972de876ea17c44d652978638d2fd583c6713d0eb6591706825020c9ee + languageName: node + linkType: hard + +"glob-parent@npm:^6.0.2": + version: 6.0.2 + resolution: "glob-parent@npm:6.0.2" + dependencies: + is-glob: "npm:^4.0.3" + checksum: 10c0/317034d88654730230b3f43bb7ad4f7c90257a426e872ea0bf157473ac61c99bf5d205fad8f0185f989be8d2fa6d3c7dce1645d99d545b6ea9089c39f838e7f8 + languageName: node + linkType: hard + +"glob@npm:^10.2.2, glob@npm:^10.4.5": + version: 10.4.5 + resolution: "glob@npm:10.4.5" + dependencies: + foreground-child: "npm:^3.1.0" + jackspeak: "npm:^3.1.2" + minimatch: "npm:^9.0.4" + minipass: "npm:^7.1.2" + package-json-from-dist: "npm:^1.0.0" + path-scurry: "npm:^1.11.1" + bin: + glob: dist/esm/bin.mjs + checksum: 10c0/19a9759ea77b8e3ca0a43c2f07ecddc2ad46216b786bb8f993c445aee80d345925a21e5280c7b7c6c59e860a0154b84e4b2b60321fea92cd3c56b4a7489f160e + languageName: node + linkType: hard + +"glob@npm:^11.0.0": + version: 11.0.3 + resolution: "glob@npm:11.0.3" + dependencies: + foreground-child: "npm:^3.3.1" + jackspeak: "npm:^4.1.1" + minimatch: "npm:^10.0.3" + minipass: "npm:^7.1.2" + package-json-from-dist: "npm:^1.0.0" + path-scurry: "npm:^2.0.0" + bin: + glob: dist/esm/bin.mjs + checksum: 10c0/7d24457549ec2903920dfa3d8e76850e7c02aa709122f0164b240c712f5455c0b457e6f2a1eee39344c6148e39895be8094ae8cfef7ccc3296ed30bce250c661 + languageName: node + linkType: hard + +"glob@npm:^13.0.6": + version: 13.0.6 + resolution: "glob@npm:13.0.6" + dependencies: + minimatch: 
"npm:^10.2.2" + minipass: "npm:^7.1.3" + path-scurry: "npm:^2.0.2" + checksum: 10c0/269c236f11a9b50357fe7a8c6aadac667e01deb5242b19c84975628f05f4438d8ee1354bb62c5d6c10f37fd59911b54d7799730633a2786660d8c69f1d18120a + languageName: node + linkType: hard + +"globals@npm:^14.0.0": + version: 14.0.0 + resolution: "globals@npm:14.0.0" + checksum: 10c0/b96ff42620c9231ad468d4c58ff42afee7777ee1c963013ff8aabe095a451d0ceeb8dcd8ef4cbd64d2538cef45f787a78ba3a9574f4a634438963e334471302d + languageName: node + linkType: hard + +"globalthis@npm:^1.0.4": + version: 1.0.4 + resolution: "globalthis@npm:1.0.4" + dependencies: + define-properties: "npm:^1.2.1" + gopd: "npm:^1.0.1" + checksum: 10c0/9d156f313af79d80b1566b93e19285f481c591ad6d0d319b4be5e03750d004dde40a39a0f26f7e635f9007a3600802f53ecd85a759b86f109e80a5f705e01846 + languageName: node + linkType: hard + +"gopd@npm:^1.0.1, gopd@npm:^1.2.0": + version: 1.2.0 + resolution: "gopd@npm:1.2.0" + checksum: 10c0/50fff1e04ba2b7737c097358534eacadad1e68d24cccee3272e04e007bed008e68d2614f3987788428fd192a5ae3889d08fb2331417e4fc4a9ab366b2043cead + languageName: node + linkType: hard + +"graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.6": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 + languageName: node + linkType: hard + +"graphemer@npm:^1.4.0": + version: 1.4.0 + resolution: "graphemer@npm:1.4.0" + checksum: 10c0/e951259d8cd2e0d196c72ec711add7115d42eb9a8146c8eeda5b8d3ac91e5dd816b9cd68920726d9fd4490368e7ed86e9c423f40db87e2d8dfafa00fa17c3a31 + languageName: node + linkType: hard + +"has-bigints@npm:^1.0.2": + version: 1.1.0 + resolution: "has-bigints@npm:1.1.0" + checksum: 10c0/2de0cdc4a1ccf7a1e75ffede1876994525ac03cc6f5ae7392d3415dd475cd9eee5bceec63669ab61aa997ff6cceebb50ef75561c7002bed8988de2b9d1b40788 + languageName: node + linkType: hard + +"has-flag@npm:^4.0.0": + 
version: 4.0.0 + resolution: "has-flag@npm:4.0.0" + checksum: 10c0/2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 + languageName: node + linkType: hard + +"has-property-descriptors@npm:^1.0.0, has-property-descriptors@npm:^1.0.2": + version: 1.0.2 + resolution: "has-property-descriptors@npm:1.0.2" + dependencies: + es-define-property: "npm:^1.0.0" + checksum: 10c0/253c1f59e80bb476cf0dde8ff5284505d90c3bdb762983c3514d36414290475fe3fd6f574929d84de2a8eec00d35cf07cb6776205ff32efd7c50719125f00236 + languageName: node + linkType: hard + +"has-proto@npm:^1.2.0": + version: 1.2.0 + resolution: "has-proto@npm:1.2.0" + dependencies: + dunder-proto: "npm:^1.0.0" + checksum: 10c0/46538dddab297ec2f43923c3d35237df45d8c55a6fc1067031e04c13ed8a9a8f94954460632fd4da84c31a1721eefee16d901cbb1ae9602bab93bb6e08f93b95 + languageName: node + linkType: hard + +"has-symbols@npm:^1.0.3, has-symbols@npm:^1.1.0": + version: 1.1.0 + resolution: "has-symbols@npm:1.1.0" + checksum: 10c0/dde0a734b17ae51e84b10986e651c664379018d10b91b6b0e9b293eddb32f0f069688c841fb40f19e9611546130153e0a2a48fd7f512891fb000ddfa36f5a20e + languageName: node + linkType: hard + +"has-tostringtag@npm:^1.0.2": + version: 1.0.2 + resolution: "has-tostringtag@npm:1.0.2" + dependencies: + has-symbols: "npm:^1.0.3" + checksum: 10c0/a8b166462192bafe3d9b6e420a1d581d93dd867adb61be223a17a8d6dad147aa77a8be32c961bb2f27b3ef893cae8d36f564ab651f5e9b7938ae86f74027c48c + languageName: node + linkType: hard + +"hasown@npm:^2.0.2": + version: 2.0.2 + resolution: "hasown@npm:2.0.2" + dependencies: + function-bind: "npm:^1.1.2" + checksum: 10c0/3769d434703b8ac66b209a4cca0737519925bbdb61dd887f93a16372b14694c63ff4e797686d87c90f08168e81082248b9b028bad60d4da9e0d1148766f56eb9 + languageName: node + linkType: hard + +"he@npm:^1.2.0": + version: 1.2.0 + resolution: "he@npm:1.2.0" + bin: + he: bin/he + checksum: 
10c0/a27d478befe3c8192f006cdd0639a66798979dfa6e2125c6ac582a19a5ebfec62ad83e8382e6036170d873f46e4536a7e795bf8b95bf7c247f4cc0825ccc8c17 + languageName: node + linkType: hard + +"html-encoding-sniffer@npm:^3.0.0": + version: 3.0.0 + resolution: "html-encoding-sniffer@npm:3.0.0" + dependencies: + whatwg-encoding: "npm:^2.0.0" + checksum: 10c0/b17b3b0fb5d061d8eb15121c3b0b536376c3e295ecaf09ba48dd69c6b6c957839db124fe1e2b3f11329753a4ee01aa7dedf63b7677999e86da17fbbdd82c5386 + languageName: node + linkType: hard + +"html-escaper@npm:^2.0.0": + version: 2.0.2 + resolution: "html-escaper@npm:2.0.2" + checksum: 10c0/208e8a12de1a6569edbb14544f4567e6ce8ecc30b9394fcaa4e7bb1e60c12a7c9a1ed27e31290817157e8626f3a4f29e76c8747030822eb84a6abb15c255f0a0 + languageName: node + linkType: hard + +"http-cache-semantics@npm:^4.1.1": + version: 4.2.0 + resolution: "http-cache-semantics@npm:4.2.0" + checksum: 10c0/45b66a945cf13ec2d1f29432277201313babf4a01d9e52f44b31ca923434083afeca03f18417f599c9ab3d0e7b618ceb21257542338b57c54b710463b4a53e37 + languageName: node + linkType: hard + +"http-proxy-agent@npm:^7.0.0": + version: 7.0.2 + resolution: "http-proxy-agent@npm:7.0.2" + dependencies: + agent-base: "npm:^7.1.0" + debug: "npm:^4.3.4" + checksum: 10c0/4207b06a4580fb85dd6dff521f0abf6db517489e70863dca1a0291daa7f2d3d2d6015a57bd702af068ea5cf9f1f6ff72314f5f5b4228d299c0904135d2aef921 + languageName: node + linkType: hard + +"http-proxy@npm:^1.18.1": + version: 1.18.1 + resolution: "http-proxy@npm:1.18.1" + dependencies: + eventemitter3: "npm:^4.0.0" + follow-redirects: "npm:^1.0.0" + requires-port: "npm:^1.0.0" + checksum: 10c0/148dfa700a03fb421e383aaaf88ac1d94521dfc34072f6c59770528c65250983c2e4ec996f2f03aa9f3fe46cd1270a593126068319311e3e8d9e610a37533e94 + languageName: node + linkType: hard + +"http-server@npm:^14.1.1": + version: 14.1.1 + resolution: "http-server@npm:14.1.1" + dependencies: + basic-auth: "npm:^2.0.1" + chalk: "npm:^4.1.2" + corser: "npm:^2.0.1" + he: "npm:^1.2.0" + 
html-encoding-sniffer: "npm:^3.0.0" + http-proxy: "npm:^1.18.1" + mime: "npm:^1.6.0" + minimist: "npm:^1.2.6" + opener: "npm:^1.5.1" + portfinder: "npm:^1.0.28" + secure-compare: "npm:3.0.1" + union: "npm:~0.5.0" + url-join: "npm:^4.0.1" + bin: + http-server: bin/http-server + checksum: 10c0/c5770ddd722dd520ce0af25efee6bfb7c6300ff4e934636d4eec83fa995739e64de2e699e89e7a795b3a1894bcc37bec226617c1023600aacd7871fd8d6ffe6d + languageName: node + linkType: hard + +"https-proxy-agent@npm:^7.0.1": + version: 7.0.6 + resolution: "https-proxy-agent@npm:7.0.6" + dependencies: + agent-base: "npm:^7.1.2" + debug: "npm:4" + checksum: 10c0/f729219bc735edb621fa30e6e84e60ee5d00802b8247aac0d7b79b0bd6d4b3294737a337b93b86a0bd9e68099d031858a39260c976dc14cdbba238ba1f8779ac + languageName: node + linkType: hard + +"iconv-lite@npm:0.6.3, iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3.0.0" + checksum: 10c0/98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1 + languageName: node + linkType: hard + +"ignore@npm:^5.2.0, ignore@npm:^5.3.1": + version: 5.3.2 + resolution: "ignore@npm:5.3.2" + checksum: 10c0/f9f652c957983634ded1e7f02da3b559a0d4cc210fca3792cb67f1b153623c9c42efdc1c4121af171e295444459fc4a9201101fb041b1104a3c000bccb188337 + languageName: node + linkType: hard + +"ignore@npm:^7.0.0": + version: 7.0.5 + resolution: "ignore@npm:7.0.5" + checksum: 10c0/ae00db89fe873064a093b8999fe4cc284b13ef2a178636211842cceb650b9c3e390d3339191acb145d81ed5379d2074840cf0c33a20bdbd6f32821f79eb4ad5d + languageName: node + linkType: hard + +"import-fresh@npm:^3.2.1": + version: 3.3.1 + resolution: "import-fresh@npm:3.3.1" + dependencies: + parent-module: "npm:^1.0.0" + resolve-from: "npm:^4.0.0" + checksum: 10c0/bf8cc494872fef783249709385ae883b447e3eb09db0ebd15dcead7d9afe7224dad7bd7591c6b73b0b19b3c0f9640eb8ee884f01cfaf2887ab995b0b36a0cbec + 
languageName: node + linkType: hard + +"import-lazy@npm:~4.0.0": + version: 4.0.0 + resolution: "import-lazy@npm:4.0.0" + checksum: 10c0/a3520313e2c31f25c0b06aa66d167f329832b68a4f957d7c9daf6e0fa41822b6e84948191648b9b9d8ca82f94740cdf15eecf2401a5b42cd1c33fd84f2225cca + languageName: node + linkType: hard + +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 10c0/8b51313850dd33605c6c9d3fd9638b714f4c4c40250cff658209f30d40da60f78992fb2df5dabee4acf589a6a82bbc79ad5486550754bd9ec4e3fc0d4a57d6a6 + languageName: node + linkType: hard + +"internal-slot@npm:^1.1.0": + version: 1.1.0 + resolution: "internal-slot@npm:1.1.0" + dependencies: + es-errors: "npm:^1.3.0" + hasown: "npm:^2.0.2" + side-channel: "npm:^1.1.0" + checksum: 10c0/03966f5e259b009a9bf1a78d60da920df198af4318ec004f57b8aef1dd3fe377fbc8cce63a96e8c810010302654de89f9e19de1cd8ad0061d15be28a695465c7 + languageName: node + linkType: hard + +"ip-address@npm:^10.0.1": + version: 10.0.1 + resolution: "ip-address@npm:10.0.1" + checksum: 10c0/1634d79dae18394004775cb6d699dc46b7c23df6d2083164025a2b15240c1164fccde53d0e08bd5ee4fc53913d033ab6b5e395a809ad4b956a940c446e948843 + languageName: node + linkType: hard + +"is-array-buffer@npm:^3.0.4, is-array-buffer@npm:^3.0.5": + version: 3.0.5 + resolution: "is-array-buffer@npm:3.0.5" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.3" + get-intrinsic: "npm:^1.2.6" + checksum: 10c0/c5c9f25606e86dbb12e756694afbbff64bc8b348d1bc989324c037e1068695131930199d6ad381952715dad3a9569333817f0b1a72ce5af7f883ce802e49c83d + languageName: node + linkType: hard + +"is-async-function@npm:^2.0.0": + version: 2.1.1 + resolution: "is-async-function@npm:2.1.1" + dependencies: + async-function: "npm:^1.0.0" + call-bound: "npm:^1.0.3" + get-proto: "npm:^1.0.1" + has-tostringtag: "npm:^1.0.2" + safe-regex-test: "npm:^1.1.0" + checksum: 
10c0/d70c236a5e82de6fc4d44368ffd0c2fee2b088b893511ce21e679da275a5ecc6015ff59a7d7e1bdd7ca39f71a8dbdd253cf8cce5c6b3c91cdd5b42b5ce677298 + languageName: node + linkType: hard + +"is-bigint@npm:^1.1.0": + version: 1.1.0 + resolution: "is-bigint@npm:1.1.0" + dependencies: + has-bigints: "npm:^1.0.2" + checksum: 10c0/f4f4b905ceb195be90a6ea7f34323bf1c18e3793f18922e3e9a73c684c29eeeeff5175605c3a3a74cc38185fe27758f07efba3dbae812e5c5afbc0d2316b40e4 + languageName: node + linkType: hard + +"is-boolean-object@npm:^1.2.1": + version: 1.2.2 + resolution: "is-boolean-object@npm:1.2.2" + dependencies: + call-bound: "npm:^1.0.3" + has-tostringtag: "npm:^1.0.2" + checksum: 10c0/36ff6baf6bd18b3130186990026f5a95c709345c39cd368468e6c1b6ab52201e9fd26d8e1f4c066357b4938b0f0401e1a5000e08257787c1a02f3a719457001e + languageName: node + linkType: hard + +"is-callable@npm:^1.2.7": + version: 1.2.7 + resolution: "is-callable@npm:1.2.7" + checksum: 10c0/ceebaeb9d92e8adee604076971dd6000d38d6afc40bb843ea8e45c5579b57671c3f3b50d7f04869618242c6cee08d1b67806a8cb8edaaaf7c0748b3720d6066f + languageName: node + linkType: hard + +"is-core-module@npm:^2.13.0, is-core-module@npm:^2.16.0, is-core-module@npm:^2.16.1": + version: 2.16.1 + resolution: "is-core-module@npm:2.16.1" + dependencies: + hasown: "npm:^2.0.2" + checksum: 10c0/898443c14780a577e807618aaae2b6f745c8538eca5c7bc11388a3f2dc6de82b9902bcc7eb74f07be672b11bbe82dd6a6edded44a00cb3d8f933d0459905eedd + languageName: node + linkType: hard + +"is-data-view@npm:^1.0.1, is-data-view@npm:^1.0.2": + version: 1.0.2 + resolution: "is-data-view@npm:1.0.2" + dependencies: + call-bound: "npm:^1.0.2" + get-intrinsic: "npm:^1.2.6" + is-typed-array: "npm:^1.1.13" + checksum: 10c0/ef3548a99d7e7f1370ce21006baca6d40c73e9f15c941f89f0049c79714c873d03b02dae1c64b3f861f55163ecc16da06506c5b8a1d4f16650b3d9351c380153 + languageName: node + linkType: hard + +"is-date-object@npm:^1.0.5, is-date-object@npm:^1.1.0": + version: 1.1.0 + resolution: "is-date-object@npm:1.1.0" + 
dependencies: + call-bound: "npm:^1.0.2" + has-tostringtag: "npm:^1.0.2" + checksum: 10c0/1a4d199c8e9e9cac5128d32e6626fa7805175af9df015620ac0d5d45854ccf348ba494679d872d37301032e35a54fc7978fba1687e8721b2139aea7870cafa2f + languageName: node + linkType: hard + +"is-extglob@npm:^2.1.1": + version: 2.1.1 + resolution: "is-extglob@npm:2.1.1" + checksum: 10c0/5487da35691fbc339700bbb2730430b07777a3c21b9ebaecb3072512dfd7b4ba78ac2381a87e8d78d20ea08affb3f1971b4af629173a6bf435ff8a4c47747912 + languageName: node + linkType: hard + +"is-finalizationregistry@npm:^1.1.0": + version: 1.1.1 + resolution: "is-finalizationregistry@npm:1.1.1" + dependencies: + call-bound: "npm:^1.0.3" + checksum: 10c0/818dff679b64f19e228a8205a1e2d09989a98e98def3a817f889208cfcbf918d321b251aadf2c05918194803ebd2eb01b14fc9d0b2bea53d984f4137bfca5e97 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^3.0.0": + version: 3.0.0 + resolution: "is-fullwidth-code-point@npm:3.0.0" + checksum: 10c0/bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc + languageName: node + linkType: hard + +"is-generator-function@npm:^1.0.10": + version: 1.1.2 + resolution: "is-generator-function@npm:1.1.2" + dependencies: + call-bound: "npm:^1.0.4" + generator-function: "npm:^2.0.0" + get-proto: "npm:^1.0.1" + has-tostringtag: "npm:^1.0.2" + safe-regex-test: "npm:^1.1.0" + checksum: 10c0/83da102e89c3e3b71d67b51d47c9f9bc862bceb58f87201727e27f7fa19d1d90b0ab223644ecaee6fc6e3d2d622bb25c966fbdaf87c59158b01ce7c0fe2fa372 + languageName: node + linkType: hard + +"is-glob@npm:^4.0.0, is-glob@npm:^4.0.1, is-glob@npm:^4.0.3": + version: 4.0.3 + resolution: "is-glob@npm:4.0.3" + dependencies: + is-extglob: "npm:^2.1.1" + checksum: 10c0/17fb4014e22be3bbecea9b2e3a76e9e34ff645466be702f1693e8f1ee1adac84710d0be0bd9f967d6354036fd51ab7c2741d954d6e91dae6bb69714de92c197a + languageName: node + linkType: hard + +"is-map@npm:^2.0.3": + version: 2.0.3 + 
resolution: "is-map@npm:2.0.3" + checksum: 10c0/2c4d431b74e00fdda7162cd8e4b763d6f6f217edf97d4f8538b94b8702b150610e2c64961340015fe8df5b1fcee33ccd2e9b62619c4a8a3a155f8de6d6d355fc + languageName: node + linkType: hard + +"is-negative-zero@npm:^2.0.3": + version: 2.0.3 + resolution: "is-negative-zero@npm:2.0.3" + checksum: 10c0/bcdcf6b8b9714063ffcfa9929c575ac69bfdabb8f4574ff557dfc086df2836cf07e3906f5bbc4f2a5c12f8f3ba56af640c843cdfc74da8caed86c7c7d66fd08e + languageName: node + linkType: hard + +"is-number-object@npm:^1.1.1": + version: 1.1.1 + resolution: "is-number-object@npm:1.1.1" + dependencies: + call-bound: "npm:^1.0.3" + has-tostringtag: "npm:^1.0.2" + checksum: 10c0/97b451b41f25135ff021d85c436ff0100d84a039bb87ffd799cbcdbea81ef30c464ced38258cdd34f080be08fc3b076ca1f472086286d2aa43521d6ec6a79f53 + languageName: node + linkType: hard + +"is-number@npm:^7.0.0": + version: 7.0.0 + resolution: "is-number@npm:7.0.0" + checksum: 10c0/b4686d0d3053146095ccd45346461bc8e53b80aeb7671cc52a4de02dbbf7dc0d1d2a986e2fe4ae206984b4d34ef37e8b795ebc4f4295c978373e6575e295d811 + languageName: node + linkType: hard + +"is-path-inside@npm:^3.0.3": + version: 3.0.3 + resolution: "is-path-inside@npm:3.0.3" + checksum: 10c0/cf7d4ac35fb96bab6a1d2c3598fe5ebb29aafb52c0aaa482b5a3ed9d8ba3edc11631e3ec2637660c44b3ce0e61a08d54946e8af30dec0b60a7c27296c68ffd05 + languageName: node + linkType: hard + +"is-plain-obj@npm:^2.1.0": + version: 2.1.0 + resolution: "is-plain-obj@npm:2.1.0" + checksum: 10c0/e5c9814cdaa627a9ad0a0964ded0e0491bfd9ace405c49a5d63c88b30a162f1512c069d5b80997893c4d0181eadc3fed02b4ab4b81059aba5620bfcdfdeb9c53 + languageName: node + linkType: hard + +"is-regex@npm:^1.2.1": + version: 1.2.1 + resolution: "is-regex@npm:1.2.1" + dependencies: + call-bound: "npm:^1.0.2" + gopd: "npm:^1.2.0" + has-tostringtag: "npm:^1.0.2" + hasown: "npm:^2.0.2" + checksum: 10c0/1d3715d2b7889932349241680032e85d0b492cfcb045acb75ffc2c3085e8d561184f1f7e84b6f8321935b4aea39bc9c6ba74ed595b57ce4881a51dfdbc214e04 + 
languageName: node + linkType: hard + +"is-set@npm:^2.0.3": + version: 2.0.3 + resolution: "is-set@npm:2.0.3" + checksum: 10c0/f73732e13f099b2dc879c2a12341cfc22ccaca8dd504e6edae26484bd5707a35d503fba5b4daad530a9b088ced1ae6c9d8200fd92e09b428fe14ea79ce8080b7 + languageName: node + linkType: hard + +"is-shared-array-buffer@npm:^1.0.4": + version: 1.0.4 + resolution: "is-shared-array-buffer@npm:1.0.4" + dependencies: + call-bound: "npm:^1.0.3" + checksum: 10c0/65158c2feb41ff1edd6bbd6fd8403a69861cf273ff36077982b5d4d68e1d59278c71691216a4a64632bd76d4792d4d1d2553901b6666d84ade13bba5ea7bc7db + languageName: node + linkType: hard + +"is-string@npm:^1.1.1": + version: 1.1.1 + resolution: "is-string@npm:1.1.1" + dependencies: + call-bound: "npm:^1.0.3" + has-tostringtag: "npm:^1.0.2" + checksum: 10c0/2f518b4e47886bb81567faba6ffd0d8a8333cf84336e2e78bf160693972e32ad00fe84b0926491cc598dee576fdc55642c92e62d0cbe96bf36f643b6f956f94d + languageName: node + linkType: hard + +"is-symbol@npm:^1.0.4, is-symbol@npm:^1.1.1": + version: 1.1.1 + resolution: "is-symbol@npm:1.1.1" + dependencies: + call-bound: "npm:^1.0.2" + has-symbols: "npm:^1.1.0" + safe-regex-test: "npm:^1.1.0" + checksum: 10c0/f08f3e255c12442e833f75a9e2b84b2d4882fdfd920513cf2a4a2324f0a5b076c8fd913778e3ea5d258d5183e9d92c0cd20e04b03ab3df05316b049b2670af1e + languageName: node + linkType: hard + +"is-typed-array@npm:^1.1.13, is-typed-array@npm:^1.1.14, is-typed-array@npm:^1.1.15": + version: 1.1.15 + resolution: "is-typed-array@npm:1.1.15" + dependencies: + which-typed-array: "npm:^1.1.16" + checksum: 10c0/415511da3669e36e002820584e264997ffe277ff136643a3126cc949197e6ca3334d0f12d084e83b1994af2e9c8141275c741cf2b7da5a2ff62dd0cac26f76c4 + languageName: node + linkType: hard + +"is-unicode-supported@npm:^0.1.0": + version: 0.1.0 + resolution: "is-unicode-supported@npm:0.1.0" + checksum: 10c0/00cbe3455c3756be68d2542c416cab888aebd5012781d6819749fefb15162ff23e38501fe681b3d751c73e8ff561ac09a5293eba6f58fdf0178462ce6dcb3453 + 
languageName: node + linkType: hard + +"is-weakmap@npm:^2.0.2": + version: 2.0.2 + resolution: "is-weakmap@npm:2.0.2" + checksum: 10c0/443c35bb86d5e6cc5929cd9c75a4024bb0fff9586ed50b092f94e700b89c43a33b186b76dbc6d54f3d3d09ece689ab38dcdc1af6a482cbe79c0f2da0a17f1299 + languageName: node + linkType: hard + +"is-weakref@npm:^1.0.2, is-weakref@npm:^1.1.1": + version: 1.1.1 + resolution: "is-weakref@npm:1.1.1" + dependencies: + call-bound: "npm:^1.0.3" + checksum: 10c0/8e0a9c07b0c780949a100e2cab2b5560a48ecd4c61726923c1a9b77b6ab0aa0046c9e7fb2206042296817045376dee2c8ab1dabe08c7c3dfbf195b01275a085b + languageName: node + linkType: hard + +"is-weakset@npm:^2.0.3": + version: 2.0.4 + resolution: "is-weakset@npm:2.0.4" + dependencies: + call-bound: "npm:^1.0.3" + get-intrinsic: "npm:^1.2.6" + checksum: 10c0/6491eba08acb8dc9532da23cb226b7d0192ede0b88f16199e592e4769db0a077119c1f5d2283d1e0d16d739115f70046e887e477eb0e66cd90e1bb29f28ba647 + languageName: node + linkType: hard + +"isarray@npm:^2.0.5": + version: 2.0.5 + resolution: "isarray@npm:2.0.5" + checksum: 10c0/4199f14a7a13da2177c66c31080008b7124331956f47bca57dd0b6ea9f11687aa25e565a2c7a2b519bc86988d10398e3049a1f5df13c9f6b7664154690ae79fd + languageName: node + linkType: hard + +"isbinaryfile@npm:^5.0.2": + version: 5.0.7 + resolution: "isbinaryfile@npm:5.0.7" + checksum: 10c0/4cd98a91aaf969d7cae91f74d041dd1df35d9e140c522b7879180035f7eab9ba9c0c3d678e00e72a2777ee7245fd8f20b60c0787132c5fdbf6fc113492325e11 + languageName: node + linkType: hard + +"isexe@npm:^2.0.0": + version: 2.0.0 + resolution: "isexe@npm:2.0.0" + checksum: 10c0/228cfa503fadc2c31596ab06ed6aa82c9976eec2bfd83397e7eaf06d0ccf42cd1dfd6743bf9aeb01aebd4156d009994c5f76ea898d2832c1fe342da923ca457d + languageName: node + linkType: hard + +"isexe@npm:^3.1.1": + version: 3.1.1 + resolution: "isexe@npm:3.1.1" + checksum: 10c0/9ec257654093443eb0a528a9c8cbba9c0ca7616ccb40abd6dde7202734d96bb86e4ac0d764f0f8cd965856aacbff2f4ce23e730dc19dfb41e3b0d865ca6fdcc7 + languageName: node + 
linkType: hard + +"istanbul-lib-coverage@npm:^3.0.0, istanbul-lib-coverage@npm:^3.2.0": + version: 3.2.2 + resolution: "istanbul-lib-coverage@npm:3.2.2" + checksum: 10c0/6c7ff2106769e5f592ded1fb418f9f73b4411fd5a084387a5410538332b6567cd1763ff6b6cadca9b9eb2c443cce2f7ea7d7f1b8d315f9ce58539793b1e0922b + languageName: node + linkType: hard + +"istanbul-lib-report@npm:^3.0.0, istanbul-lib-report@npm:^3.0.1": + version: 3.0.1 + resolution: "istanbul-lib-report@npm:3.0.1" + dependencies: + istanbul-lib-coverage: "npm:^3.0.0" + make-dir: "npm:^4.0.0" + supports-color: "npm:^7.1.0" + checksum: 10c0/84323afb14392de8b6a5714bd7e9af845cfbd56cfe71ed276cda2f5f1201aea673c7111901227ee33e68e4364e288d73861eb2ed48f6679d1e69a43b6d9b3ba7 + languageName: node + linkType: hard + +"istanbul-reports@npm:^3.1.6": + version: 3.2.0 + resolution: "istanbul-reports@npm:3.2.0" + dependencies: + html-escaper: "npm:^2.0.0" + istanbul-lib-report: "npm:^3.0.0" + checksum: 10c0/d596317cfd9c22e1394f22a8d8ba0303d2074fe2e971887b32d870e4b33f8464b10f8ccbe6847808f7db485f084eba09e6c2ed706b3a978e4b52f07085b8f9bc + languageName: node + linkType: hard + +"jackspeak@npm:^3.1.2": + version: 3.4.3 + resolution: "jackspeak@npm:3.4.3" + dependencies: + "@isaacs/cliui": "npm:^8.0.2" + "@pkgjs/parseargs": "npm:^0.11.0" + dependenciesMeta: + "@pkgjs/parseargs": + optional: true + checksum: 10c0/6acc10d139eaefdbe04d2f679e6191b3abf073f111edf10b1de5302c97ec93fffeb2fdd8681ed17f16268aa9dd4f8c588ed9d1d3bffbbfa6e8bf897cbb3149b9 + languageName: node + linkType: hard + +"jackspeak@npm:^4.1.1": + version: 4.1.1 + resolution: "jackspeak@npm:4.1.1" + dependencies: + "@isaacs/cliui": "npm:^8.0.2" + checksum: 10c0/84ec4f8e21d6514db24737d9caf65361511f75e5e424980eebca4199f400874f45e562ac20fa8aeb1dd20ca2f3f81f0788b6e9c3e64d216a5794fd6f30e0e042 + languageName: node + linkType: hard + +"jju@npm:~1.4.0": + version: 1.4.0 + resolution: "jju@npm:1.4.0" + checksum: 
10c0/f3f444557e4364cfc06b1abf8331bf3778b26c0c8552ca54429bc0092652172fdea26cbffe33e1017b303d5aa506f7ede8571857400efe459cb7439180e2acad + languageName: node + linkType: hard + +"js-yaml@npm:^4.1.0": + version: 4.1.0 + resolution: "js-yaml@npm:4.1.0" + dependencies: + argparse: "npm:^2.0.1" + bin: + js-yaml: bin/js-yaml.js + checksum: 10c0/184a24b4eaacfce40ad9074c64fd42ac83cf74d8c8cd137718d456ced75051229e5061b8633c3366b8aada17945a7a356b337828c19da92b51ae62126575018f + languageName: node + linkType: hard + +"json-buffer@npm:3.0.1": + version: 3.0.1 + resolution: "json-buffer@npm:3.0.1" + checksum: 10c0/0d1c91569d9588e7eef2b49b59851f297f3ab93c7b35c7c221e288099322be6b562767d11e4821da500f3219542b9afd2e54c5dc573107c1126ed1080f8e96d7 + languageName: node + linkType: hard + +"json-schema-traverse@npm:^0.4.1": + version: 0.4.1 + resolution: "json-schema-traverse@npm:0.4.1" + checksum: 10c0/108fa90d4cc6f08243aedc6da16c408daf81793bf903e9fd5ab21983cda433d5d2da49e40711da016289465ec2e62e0324dcdfbc06275a607fe3233fde4942ce + languageName: node + linkType: hard + +"json-schema-traverse@npm:^1.0.0": + version: 1.0.0 + resolution: "json-schema-traverse@npm:1.0.0" + checksum: 10c0/71e30015d7f3d6dc1c316d6298047c8ef98a06d31ad064919976583eb61e1018a60a0067338f0f79cabc00d84af3fcc489bd48ce8a46ea165d9541ba17fb30c6 + languageName: node + linkType: hard + +"json-stable-stringify-without-jsonify@npm:^1.0.1": + version: 1.0.1 + resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" + checksum: 10c0/cb168b61fd4de83e58d09aaa6425ef71001bae30d260e2c57e7d09a5fd82223e2f22a042dedaab8db23b7d9ae46854b08bb1f91675a8be11c5cffebef5fb66a5 + languageName: node + linkType: hard + +"json5@npm:^1.0.2": + version: 1.0.2 + resolution: "json5@npm:1.0.2" + dependencies: + minimist: "npm:^1.2.0" + bin: + json5: lib/cli.js + checksum: 10c0/9ee316bf21f000b00752e6c2a3b79ecf5324515a5c60ee88983a1910a45426b643a4f3461657586e8aeca87aaf96f0a519b0516d2ae527a6c3e7eed80f68717f + languageName: node + linkType: hard + 
+"jsonfile@npm:^6.0.1": + version: 6.2.0 + resolution: "jsonfile@npm:6.2.0" + dependencies: + graceful-fs: "npm:^4.1.6" + universalify: "npm:^2.0.0" + dependenciesMeta: + graceful-fs: + optional: true + checksum: 10c0/7f4f43b08d1869ded8a6822213d13ae3b99d651151d77efd1557ced0889c466296a7d9684e397bd126acf5eb2cfcb605808c3e681d0fdccd2fe5a04b47e76c0d + languageName: node + linkType: hard + +"keyv@npm:^4.5.4": + version: 4.5.4 + resolution: "keyv@npm:4.5.4" + dependencies: + json-buffer: "npm:3.0.1" + checksum: 10c0/aa52f3c5e18e16bb6324876bb8b59dd02acf782a4b789c7b2ae21107fab95fab3890ed448d4f8dba80ce05391eeac4bfabb4f02a20221342982f806fa2cf271e + languageName: node + linkType: hard + +"kolorist@npm:^1.8.0": + version: 1.8.0 + resolution: "kolorist@npm:1.8.0" + checksum: 10c0/73075db44a692bf6c34a649f3b4b3aea4993b84f6b754cbf7a8577e7c7db44c0bad87752bd23b0ce533f49de2244ce2ce03b7b1b667a85ae170a94782cc50f9b + languageName: node + linkType: hard + +"levn@npm:^0.4.1": + version: 0.4.1 + resolution: "levn@npm:0.4.1" + dependencies: + prelude-ls: "npm:^1.2.1" + type-check: "npm:~0.4.0" + checksum: 10c0/effb03cad7c89dfa5bd4f6989364bfc79994c2042ec5966cb9b95990e2edee5cd8969ddf42616a0373ac49fac1403437deaf6e9050fbbaa3546093a59b9ac94e + languageName: node + linkType: hard + +"linkify-it@npm:^5.0.0": + version: 5.0.0 + resolution: "linkify-it@npm:5.0.0" + dependencies: + uc.micro: "npm:^2.0.0" + checksum: 10c0/ff4abbcdfa2003472fc3eb4b8e60905ec97718e11e33cca52059919a4c80cc0e0c2a14d23e23d8c00e5402bc5a885cdba8ca053a11483ab3cc8b3c7a52f88e2d + languageName: node + linkType: hard + +"local-pkg@npm:^1.0.0": + version: 1.1.2 + resolution: "local-pkg@npm:1.1.2" + dependencies: + mlly: "npm:^1.7.4" + pkg-types: "npm:^2.3.0" + quansync: "npm:^0.2.11" + checksum: 10c0/1bcfcc5528dea95cba3caa478126a348d3985aad9f69ecf7802c13efef90897e1c5ff7851974332c5e6d4a4698efe610fef758a068c8bc3feb5322aeb35d5993 + languageName: node + linkType: hard + +"locate-path@npm:^6.0.0": + version: 6.0.0 + resolution: 
"locate-path@npm:6.0.0" + dependencies: + p-locate: "npm:^5.0.0" + checksum: 10c0/d3972ab70dfe58ce620e64265f90162d247e87159b6126b01314dd67be43d50e96a50b517bce2d9452a79409c7614054c277b5232377de50416564a77ac7aad3 + languageName: node + linkType: hard + +"lodash.merge@npm:^4.6.2": + version: 4.6.2 + resolution: "lodash.merge@npm:4.6.2" + checksum: 10c0/402fa16a1edd7538de5b5903a90228aa48eb5533986ba7fa26606a49db2572bf414ff73a2c9f5d5fd36b31c46a5d5c7e1527749c07cbcf965ccff5fbdf32c506 + languageName: node + linkType: hard + +"lodash@npm:~4.17.15": + version: 4.17.21 + resolution: "lodash@npm:4.17.21" + checksum: 10c0/d8cbea072bb08655bb4c989da418994b073a608dffa608b09ac04b43a791b12aeae7cd7ad919aa4c925f33b48490b5cfe6c1f71d827956071dae2e7bb3a6b74c + languageName: node + linkType: hard + +"log-symbols@npm:^4.1.0": + version: 4.1.0 + resolution: "log-symbols@npm:4.1.0" + dependencies: + chalk: "npm:^4.1.0" + is-unicode-supported: "npm:^0.1.0" + checksum: 10c0/67f445a9ffa76db1989d0fa98586e5bc2fd5247260dafb8ad93d9f0ccd5896d53fb830b0e54dade5ad838b9de2006c826831a3c528913093af20dff8bd24aca6 + languageName: node + linkType: hard + +"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0": + version: 10.4.3 + resolution: "lru-cache@npm:10.4.3" + checksum: 10c0/ebd04fbca961e6c1d6c0af3799adcc966a1babe798f685bb84e6599266599cd95d94630b10262f5424539bc4640107e8a33aa28585374abf561d30d16f4b39fb + languageName: node + linkType: hard + +"lru-cache@npm:^11.0.0": + version: 11.2.2 + resolution: "lru-cache@npm:11.2.2" + checksum: 10c0/72d7831bbebc85e2bdefe01047ee5584db69d641c48d7a509e86f66f6ee111b30af7ec3bd68a967d47b69a4b1fa8bbf3872630bd06a63b6735e6f0a5f1c8e83d + languageName: node + linkType: hard + +"lru-cache@npm:^6.0.0": + version: 6.0.0 + resolution: "lru-cache@npm:6.0.0" + dependencies: + yallist: "npm:^4.0.0" + checksum: 10c0/cb53e582785c48187d7a188d3379c181b5ca2a9c78d2bce3e7dee36f32761d1c42983da3fe12b55cb74e1779fa94cdc2e5367c028a9b35317184ede0c07a30a9 + languageName: node + linkType: hard + 
+"lunr@npm:^2.3.9": + version: 2.3.9 + resolution: "lunr@npm:2.3.9" + checksum: 10c0/77d7dbb4fbd602aac161e2b50887d8eda28c0fa3b799159cee380fbb311f1e614219126ecbbd2c3a9c685f1720a8109b3c1ca85cc893c39b6c9cc6a62a1d8a8b + languageName: node + linkType: hard + +"magic-string@npm:^0.30.17": + version: 0.30.19 + resolution: "magic-string@npm:0.30.19" + dependencies: + "@jridgewell/sourcemap-codec": "npm:^1.5.5" + checksum: 10c0/db23fd2e2ee98a1aeb88a4cdb2353137fcf05819b883c856dd79e4c7dfb25151e2a5a4d5dbd88add5e30ed8ae5c51bcf4accbc6becb75249d924ec7b4fbcae27 + languageName: node + linkType: hard + +"make-dir@npm:^4.0.0": + version: 4.0.0 + resolution: "make-dir@npm:4.0.0" + dependencies: + semver: "npm:^7.5.3" + checksum: 10c0/69b98a6c0b8e5c4fe9acb61608a9fbcfca1756d910f51e5dbe7a9e5cfb74fca9b8a0c8a0ffdf1294a740826c1ab4871d5bf3f62f72a3049e5eac6541ddffed68 + languageName: node + linkType: hard + +"make-error@npm:^1.1.1": + version: 1.3.6 + resolution: "make-error@npm:1.3.6" + checksum: 10c0/171e458d86854c6b3fc46610cfacf0b45149ba043782558c6875d9f42f222124384ad0b468c92e996d815a8a2003817a710c0a160e49c1c394626f76fa45396f + languageName: node + linkType: hard + +"make-fetch-happen@npm:^14.0.3": + version: 14.0.3 + resolution: "make-fetch-happen@npm:14.0.3" + dependencies: + "@npmcli/agent": "npm:^3.0.0" + cacache: "npm:^19.0.1" + http-cache-semantics: "npm:^4.1.1" + minipass: "npm:^7.0.2" + minipass-fetch: "npm:^4.0.0" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + negotiator: "npm:^1.0.0" + proc-log: "npm:^5.0.0" + promise-retry: "npm:^2.0.1" + ssri: "npm:^12.0.0" + checksum: 10c0/c40efb5e5296e7feb8e37155bde8eb70bc57d731b1f7d90e35a092fde403d7697c56fb49334d92d330d6f1ca29a98142036d6480a12681133a0a1453164cb2f0 + languageName: node + linkType: hard + +"markdown-it@npm:^14.1.0": + version: 14.1.0 + resolution: "markdown-it@npm:14.1.0" + dependencies: + argparse: "npm:^2.0.1" + entities: "npm:^4.4.0" + linkify-it: "npm:^5.0.0" + mdurl: "npm:^2.0.0" + punycode.js: 
"npm:^2.3.1" + uc.micro: "npm:^2.1.0" + bin: + markdown-it: bin/markdown-it.mjs + checksum: 10c0/9a6bb444181d2db7016a4173ae56a95a62c84d4cbfb6916a399b11d3e6581bf1cc2e4e1d07a2f022ae72c25f56db90fbe1e529fca16fbf9541659dc53480d4b4 + languageName: node + linkType: hard + +"math-intrinsics@npm:^1.1.0": + version: 1.1.0 + resolution: "math-intrinsics@npm:1.1.0" + checksum: 10c0/7579ff94e899e2f76ab64491d76cf606274c874d8f2af4a442c016bd85688927fcfca157ba6bf74b08e9439dc010b248ce05b96cc7c126a354c3bae7fcb48b7f + languageName: node + linkType: hard + +"mdurl@npm:^2.0.0": + version: 2.0.0 + resolution: "mdurl@npm:2.0.0" + checksum: 10c0/633db522272f75ce4788440669137c77540d74a83e9015666a9557a152c02e245b192edc20bc90ae953bbab727503994a53b236b4d9c99bdaee594d0e7dd2ce0 + languageName: node + linkType: hard + +"merge2@npm:^1.3.0": + version: 1.4.1 + resolution: "merge2@npm:1.4.1" + checksum: 10c0/254a8a4605b58f450308fc474c82ac9a094848081bf4c06778200207820e5193726dc563a0d2c16468810516a5c97d9d3ea0ca6585d23c58ccfff2403e8dbbeb + languageName: node + linkType: hard + +"micromatch@npm:^4.0.8": + version: 4.0.8 + resolution: "micromatch@npm:4.0.8" + dependencies: + braces: "npm:^3.0.3" + picomatch: "npm:^2.3.1" + checksum: 10c0/166fa6eb926b9553f32ef81f5f531d27b4ce7da60e5baf8c021d043b27a388fb95e46a8038d5045877881e673f8134122b59624d5cecbd16eb50a42e7a6b5ca8 + languageName: node + linkType: hard + +"mime@npm:^1.6.0": + version: 1.6.0 + resolution: "mime@npm:1.6.0" + bin: + mime: cli.js + checksum: 10c0/b92cd0adc44888c7135a185bfd0dddc42c32606401c72896a842ae15da71eb88858f17669af41e498b463cd7eb998f7b48939a25b08374c7924a9c8a6f8a81b0 + languageName: node + linkType: hard + +"minimatch@npm:10.0.3, minimatch@npm:^10.0.3": + version: 10.0.3 + resolution: "minimatch@npm:10.0.3" + dependencies: + "@isaacs/brace-expansion": "npm:^5.0.0" + checksum: 10c0/e43e4a905c5d70ac4cec8530ceaeccb9c544b1ba8ac45238e2a78121a01c17ff0c373346472d221872563204eabe929ad02669bb575cb1f0cc30facab369f70f + languageName: node + 
linkType: hard + +"minimatch@npm:^10.2.2": + version: 10.2.4 + resolution: "minimatch@npm:10.2.4" + dependencies: + brace-expansion: "npm:^5.0.2" + checksum: 10c0/35f3dfb7b99b51efd46afd378486889f590e7efb10e0f6a10ba6800428cf65c9a8dedb74427d0570b318d749b543dc4e85f06d46d2858bc8cac7e1eb49a95945 + languageName: node + linkType: hard + +"minimatch@npm:^3.1.2": + version: 3.1.2 + resolution: "minimatch@npm:3.1.2" + dependencies: + brace-expansion: "npm:^1.1.7" + checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + languageName: node + linkType: hard + +"minimatch@npm:^9.0.3, minimatch@npm:^9.0.4, minimatch@npm:^9.0.5": + version: 9.0.5 + resolution: "minimatch@npm:9.0.5" + dependencies: + brace-expansion: "npm:^2.0.1" + checksum: 10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed + languageName: node + linkType: hard + +"minimist@npm:^1.2.0, minimist@npm:^1.2.6": + version: 1.2.8 + resolution: "minimist@npm:1.2.8" + checksum: 10c0/19d3fcdca050087b84c2029841a093691a91259a47def2f18222f41e7645a0b7c44ef4b40e88a1e58a40c84d2ef0ee6047c55594d298146d0eb3f6b737c20ce6 + languageName: node + linkType: hard + +"minipass-collect@npm:^2.0.1": + version: 2.0.1 + resolution: "minipass-collect@npm:2.0.1" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/5167e73f62bb74cc5019594709c77e6a742051a647fe9499abf03c71dca75515b7959d67a764bdc4f8b361cf897fbf25e2d9869ee039203ed45240f48b9aa06e + languageName: node + linkType: hard + +"minipass-fetch@npm:^4.0.0": + version: 4.0.1 + resolution: "minipass-fetch@npm:4.0.1" + dependencies: + encoding: "npm:^0.1.13" + minipass: "npm:^7.0.3" + minipass-sized: "npm:^1.0.3" + minizlib: "npm:^3.0.1" + dependenciesMeta: + encoding: + optional: true + checksum: 
10c0/a3147b2efe8e078c9bf9d024a0059339c5a09c5b1dded6900a219c218cc8b1b78510b62dae556b507304af226b18c3f1aeb1d48660283602d5b6586c399eed5c + languageName: node + linkType: hard + +"minipass-flush@npm:^1.0.5": + version: 1.0.5 + resolution: "minipass-flush@npm:1.0.5" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/2a51b63feb799d2bb34669205eee7c0eaf9dce01883261a5b77410c9408aa447e478efd191b4de6fc1101e796ff5892f8443ef20d9544385819093dbb32d36bd + languageName: node + linkType: hard + +"minipass-pipeline@npm:^1.2.4": + version: 1.2.4 + resolution: "minipass-pipeline@npm:1.2.4" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/cbda57cea20b140b797505dc2cac71581a70b3247b84480c1fed5ca5ba46c25ecc25f68bfc9e6dcb1a6e9017dab5c7ada5eab73ad4f0a49d84e35093e0c643f2 + languageName: node + linkType: hard + +"minipass-sized@npm:^1.0.3": + version: 1.0.3 + resolution: "minipass-sized@npm:1.0.3" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/298f124753efdc745cfe0f2bdfdd81ba25b9f4e753ca4a2066eb17c821f25d48acea607dfc997633ee5bf7b6dfffb4eee4f2051eb168663f0b99fad2fa4829cb + languageName: node + linkType: hard + +"minipass@npm:^3.0.0": + version: 3.3.6 + resolution: "minipass@npm:3.3.6" + dependencies: + yallist: "npm:^4.0.0" + checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c + languageName: node + linkType: hard + +"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3, minipass@npm:^7.0.4, minipass@npm:^7.1.2": + version: 7.1.2 + resolution: "minipass@npm:7.1.2" + checksum: 10c0/b0fd20bb9fb56e5fa9a8bfac539e8915ae07430a619e4b86ff71f5fc757ef3924b23b2c4230393af1eda647ed3d75739e4e0acb250a6b1eb277cf7f8fe449557 + languageName: node + linkType: hard + +"minipass@npm:^7.1.3": + version: 7.1.3 + resolution: "minipass@npm:7.1.3" + checksum: 
10c0/539da88daca16533211ea5a9ee98dc62ff5742f531f54640dd34429e621955e91cc280a91a776026264b7f9f6735947629f920944e9c1558369e8bf22eb33fbb + languageName: node + linkType: hard + +"minizlib@npm:^3.0.1, minizlib@npm:^3.1.0": + version: 3.1.0 + resolution: "minizlib@npm:3.1.0" + dependencies: + minipass: "npm:^7.1.2" + checksum: 10c0/5aad75ab0090b8266069c9aabe582c021ae53eb33c6c691054a13a45db3b4f91a7fb1bd79151e6b4e9e9a86727b522527c0a06ec7d45206b745d54cd3097bcec + languageName: node + linkType: hard + +"mlly@npm:^1.7.4": + version: 1.8.0 + resolution: "mlly@npm:1.8.0" + dependencies: + acorn: "npm:^8.15.0" + pathe: "npm:^2.0.3" + pkg-types: "npm:^1.3.1" + ufo: "npm:^1.6.1" + checksum: 10c0/f174b844ae066c71e9b128046677868e2e28694f0bbeeffbe760b2a9d8ff24de0748d0fde6fabe706700c1d2e11d3c0d7a53071b5ea99671592fac03364604ab + languageName: node + linkType: hard + +"mocha@npm:^11.7.4": + version: 11.7.4 + resolution: "mocha@npm:11.7.4" + dependencies: + browser-stdout: "npm:^1.3.1" + chokidar: "npm:^4.0.1" + debug: "npm:^4.3.5" + diff: "npm:^7.0.0" + escape-string-regexp: "npm:^4.0.0" + find-up: "npm:^5.0.0" + glob: "npm:^10.4.5" + he: "npm:^1.2.0" + is-path-inside: "npm:^3.0.3" + js-yaml: "npm:^4.1.0" + log-symbols: "npm:^4.1.0" + minimatch: "npm:^9.0.5" + ms: "npm:^2.1.3" + picocolors: "npm:^1.1.1" + serialize-javascript: "npm:^6.0.2" + strip-json-comments: "npm:^3.1.1" + supports-color: "npm:^8.1.1" + workerpool: "npm:^9.2.0" + yargs: "npm:^17.7.2" + yargs-parser: "npm:^21.1.1" + yargs-unparser: "npm:^2.0.0" + bin: + _mocha: bin/_mocha + mocha: bin/mocha.js + checksum: 10c0/f84252dd93b7d67e20e3ca09c6be0da77d43795d502132a976450c5c19025de632bbaab6751dd1d8fd6311a3063d2f3647e61db84feb17900995d52e01cfb3b8 + languageName: node + linkType: hard + +"ms@npm:^2.1.1, ms@npm:^2.1.3": + version: 2.1.3 + resolution: "ms@npm:2.1.3" + checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 + languageName: 
node + linkType: hard + +"muggle-string@npm:^0.4.1": + version: 0.4.1 + resolution: "muggle-string@npm:0.4.1" + checksum: 10c0/e914b63e24cd23f97e18376ec47e4ba3aa24365e4776212b666add2e47bb158003212980d732c49abf3719568900af7861873844a6e2d3a7ca7e86952c0e99e9 + languageName: node + linkType: hard + +"nanoid@npm:^3.3.11": + version: 3.3.11 + resolution: "nanoid@npm:3.3.11" + bin: + nanoid: bin/nanoid.cjs + checksum: 10c0/40e7f70b3d15f725ca072dfc4f74e81fcf1fbb02e491cf58ac0c79093adc9b0a73b152bcde57df4b79cd097e13023d7504acb38404a4da7bc1cd8e887b82fe0b + languageName: node + linkType: hard + +"natural-compare@npm:^1.4.0": + version: 1.4.0 + resolution: "natural-compare@npm:1.4.0" + checksum: 10c0/f5f9a7974bfb28a91afafa254b197f0f22c684d4a1731763dda960d2c8e375b36c7d690e0d9dc8fba774c537af14a7e979129bca23d88d052fbeb9466955e447 + languageName: node + linkType: hard + +"negotiator@npm:^1.0.0": + version: 1.0.0 + resolution: "negotiator@npm:1.0.0" + checksum: 10c0/4c559dd52669ea48e1914f9d634227c561221dd54734070791f999c52ed0ff36e437b2e07d5c1f6e32909fc625fe46491c16e4a8f0572567d4dd15c3a4fda04b + languageName: node + linkType: hard + +"node-gyp@npm:latest": + version: 11.4.2 + resolution: "node-gyp@npm:11.4.2" + dependencies: + env-paths: "npm:^2.2.0" + exponential-backoff: "npm:^3.1.1" + graceful-fs: "npm:^4.2.6" + make-fetch-happen: "npm:^14.0.3" + nopt: "npm:^8.0.0" + proc-log: "npm:^5.0.0" + semver: "npm:^7.3.5" + tar: "npm:^7.4.3" + tinyglobby: "npm:^0.2.12" + which: "npm:^5.0.0" + bin: + node-gyp: bin/node-gyp.js + checksum: 10c0/0bfd3e96770ed70f07798d881dd37b4267708966d868a0e585986baac487d9cf5831285579fd629a83dc4e434f53e6416ce301097f2ee464cb74d377e4d8bdbe + languageName: node + linkType: hard + +"nopt@npm:^8.0.0": + version: 8.1.0 + resolution: "nopt@npm:8.1.0" + dependencies: + abbrev: "npm:^3.0.0" + bin: + nopt: bin/nopt.js + checksum: 10c0/62e9ea70c7a3eb91d162d2c706b6606c041e4e7b547cbbb48f8b3695af457dd6479904d7ace600856bf923dd8d1ed0696f06195c8c20f02ac87c1da0e1d315ef + 
languageName: node + linkType: hard + +"object-inspect@npm:^1.13.3, object-inspect@npm:^1.13.4": + version: 1.13.4 + resolution: "object-inspect@npm:1.13.4" + checksum: 10c0/d7f8711e803b96ea3191c745d6f8056ce1f2496e530e6a19a0e92d89b0fa3c76d910c31f0aa270432db6bd3b2f85500a376a83aaba849a8d518c8845b3211692 + languageName: node + linkType: hard + +"object-keys@npm:^1.1.1": + version: 1.1.1 + resolution: "object-keys@npm:1.1.1" + checksum: 10c0/b11f7ccdbc6d406d1f186cdadb9d54738e347b2692a14439ca5ac70c225fa6db46db809711b78589866d47b25fc3e8dee0b4c722ac751e11180f9380e3d8601d + languageName: node + linkType: hard + +"object.assign@npm:^4.1.7": + version: 4.1.7 + resolution: "object.assign@npm:4.1.7" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.3" + define-properties: "npm:^1.2.1" + es-object-atoms: "npm:^1.0.0" + has-symbols: "npm:^1.1.0" + object-keys: "npm:^1.1.1" + checksum: 10c0/3b2732bd860567ea2579d1567525168de925a8d852638612846bd8082b3a1602b7b89b67b09913cbb5b9bd6e95923b2ae73580baa9d99cb4e990564e8cbf5ddc + languageName: node + linkType: hard + +"object.fromentries@npm:^2.0.8": + version: 2.0.8 + resolution: "object.fromentries@npm:2.0.8" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.2" + es-object-atoms: "npm:^1.0.0" + checksum: 10c0/cd4327e6c3369cfa805deb4cbbe919bfb7d3aeebf0bcaba291bb568ea7169f8f8cdbcabe2f00b40db0c20cd20f08e11b5f3a5a36fb7dd3fe04850c50db3bf83b + languageName: node + linkType: hard + +"object.groupby@npm:^1.0.3": + version: 1.0.3 + resolution: "object.groupby@npm:1.0.3" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.2" + checksum: 10c0/60d0455c85c736fbfeda0217d1a77525956f76f7b2495edeca9e9bbf8168a45783199e77b894d30638837c654d0cc410e0e02cbfcf445bc8de71c3da1ede6a9c + languageName: node + linkType: hard + +"object.values@npm:^1.2.1": + version: 1.2.1 + resolution: "object.values@npm:1.2.1" + dependencies: + call-bind: "npm:^1.0.8" 
+ call-bound: "npm:^1.0.3" + define-properties: "npm:^1.2.1" + es-object-atoms: "npm:^1.0.0" + checksum: 10c0/3c47814fdc64842ae3d5a74bc9d06bdd8d21563c04d9939bf6716a9c00596a4ebc342552f8934013d1ec991c74e3671b26710a0c51815f0b603795605ab6b2c9 + languageName: node + linkType: hard + +"once@npm:^1.4.0": + version: 1.4.0 + resolution: "once@npm:1.4.0" + dependencies: + wrappy: "npm:1" + checksum: 10c0/5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 + languageName: node + linkType: hard + +"opener@npm:^1.5.1": + version: 1.5.2 + resolution: "opener@npm:1.5.2" + bin: + opener: bin/opener-bin.js + checksum: 10c0/dd56256ab0cf796585617bc28e06e058adf09211781e70b264c76a1dbe16e90f868c974e5bf5309c93469157c7d14b89c35dc53fe7293b0e40b4d2f92073bc79 + languageName: node + linkType: hard + +"optionator@npm:^0.9.3": + version: 0.9.4 + resolution: "optionator@npm:0.9.4" + dependencies: + deep-is: "npm:^0.1.3" + fast-levenshtein: "npm:^2.0.6" + levn: "npm:^0.4.1" + prelude-ls: "npm:^1.2.1" + type-check: "npm:^0.4.0" + word-wrap: "npm:^1.2.5" + checksum: 10c0/4afb687a059ee65b61df74dfe87d8d6815cd6883cb8b3d5883a910df72d0f5d029821f37025e4bccf4048873dbdb09acc6d303d27b8f76b1a80dd5a7d5334675 + languageName: node + linkType: hard + +"own-keys@npm:^1.0.1": + version: 1.0.1 + resolution: "own-keys@npm:1.0.1" + dependencies: + get-intrinsic: "npm:^1.2.6" + object-keys: "npm:^1.1.1" + safe-push-apply: "npm:^1.0.0" + checksum: 10c0/6dfeb3455bff92ec3f16a982d4e3e65676345f6902d9f5ded1d8265a6318d0200ce461956d6d1c70053c7fe9f9fe65e552faac03f8140d37ef0fdd108e67013a + languageName: node + linkType: hard + +"p-limit@npm:^3.0.2": + version: 3.1.0 + resolution: "p-limit@npm:3.1.0" + dependencies: + yocto-queue: "npm:^0.1.0" + checksum: 10c0/9db675949dbdc9c3763c89e748d0ef8bdad0afbb24d49ceaf4c46c02c77d30db4e0652ed36d0a0a7a95154335fab810d95c86153105bb73b3a90448e2bb14e1a + languageName: node + linkType: hard + +"p-locate@npm:^5.0.0": + 
version: 5.0.0 + resolution: "p-locate@npm:5.0.0" + dependencies: + p-limit: "npm:^3.0.2" + checksum: 10c0/2290d627ab7903b8b70d11d384fee714b797f6040d9278932754a6860845c4d3190603a0772a663c8cb5a7b21d1b16acb3a6487ebcafa9773094edc3dfe6009a + languageName: node + linkType: hard + +"p-map@npm:^7.0.2": + version: 7.0.3 + resolution: "p-map@npm:7.0.3" + checksum: 10c0/46091610da2b38ce47bcd1d8b4835a6fa4e832848a6682cf1652bc93915770f4617afc844c10a77d1b3e56d2472bb2d5622353fa3ead01a7f42b04fc8e744a5c + languageName: node + linkType: hard + +"package-json-from-dist@npm:^1.0.0": + version: 1.0.1 + resolution: "package-json-from-dist@npm:1.0.1" + checksum: 10c0/62ba2785eb655fec084a257af34dbe24292ab74516d6aecef97ef72d4897310bc6898f6c85b5cd22770eaa1ce60d55a0230e150fb6a966e3ecd6c511e23d164b + languageName: node + linkType: hard + +"parent-module@npm:^1.0.0": + version: 1.0.1 + resolution: "parent-module@npm:1.0.1" + dependencies: + callsites: "npm:^3.0.0" + checksum: 10c0/c63d6e80000d4babd11978e0d3fee386ca7752a02b035fd2435960ffaa7219dc42146f07069fb65e6e8bf1caef89daf9af7535a39bddf354d78bf50d8294f556 + languageName: node + linkType: hard + +"path-browserify@npm:^1.0.1": + version: 1.0.1 + resolution: "path-browserify@npm:1.0.1" + checksum: 10c0/8b8c3fd5c66bd340272180590ae4ff139769e9ab79522e2eb82e3d571a89b8117c04147f65ad066dccfb42fcad902e5b7d794b3d35e0fd840491a8ddbedf8c66 + languageName: node + linkType: hard + +"path-exists@npm:^4.0.0": + version: 4.0.0 + resolution: "path-exists@npm:4.0.0" + checksum: 10c0/8c0bd3f5238188197dc78dced15207a4716c51cc4e3624c44fc97acf69558f5ebb9a2afff486fe1b4ee148e0c133e96c5e11a9aa5c48a3006e3467da070e5e1b + languageName: node + linkType: hard + +"path-key@npm:^3.1.0": + version: 3.1.1 + resolution: "path-key@npm:3.1.1" + checksum: 10c0/748c43efd5a569c039d7a00a03b58eecd1d75f3999f5a28303d75f521288df4823bc057d8784eb72358b2895a05f29a070bc9f1f17d28226cc4e62494cc58c4c + languageName: node + linkType: hard + +"path-parse@npm:^1.0.7": + version: 1.0.7 + resolution: 
"path-parse@npm:1.0.7" + checksum: 10c0/11ce261f9d294cc7a58d6a574b7f1b935842355ec66fba3c3fd79e0f036462eaf07d0aa95bb74ff432f9afef97ce1926c720988c6a7451d8a584930ae7de86e1 + languageName: node + linkType: hard + +"path-scurry@npm:^1.11.1": + version: 1.11.1 + resolution: "path-scurry@npm:1.11.1" + dependencies: + lru-cache: "npm:^10.2.0" + minipass: "npm:^5.0.0 || ^6.0.2 || ^7.0.0" + checksum: 10c0/32a13711a2a505616ae1cc1b5076801e453e7aae6ac40ab55b388bb91b9d0547a52f5aaceff710ea400205f18691120d4431e520afbe4266b836fadede15872d + languageName: node + linkType: hard + +"path-scurry@npm:^2.0.0": + version: 2.0.0 + resolution: "path-scurry@npm:2.0.0" + dependencies: + lru-cache: "npm:^11.0.0" + minipass: "npm:^7.1.2" + checksum: 10c0/3da4adedaa8e7ef8d6dc4f35a0ff8f05a9b4d8365f2b28047752b62d4c1ad73eec21e37b1579ef2d075920157856a3b52ae8309c480a6f1a8bbe06ff8e52b33c + languageName: node + linkType: hard + +"path-scurry@npm:^2.0.2": + version: 2.0.2 + resolution: "path-scurry@npm:2.0.2" + dependencies: + lru-cache: "npm:^11.0.0" + minipass: "npm:^7.1.2" + checksum: 10c0/b35ad37cf6557a87fd057121ce2be7695380c9138d93e87ae928609da259ea0a170fac6f3ef1eb3ece8a068e8b7f2f3adf5bb2374cf4d4a57fe484954fcc9482 + languageName: node + linkType: hard + +"pathe@npm:^2.0.1, pathe@npm:^2.0.3": + version: 2.0.3 + resolution: "pathe@npm:2.0.3" + checksum: 10c0/c118dc5a8b5c4166011b2b70608762e260085180bb9e33e80a50dcdb1e78c010b1624f4280c492c92b05fc276715a4c357d1f9edc570f8f1b3d90b6839ebaca1 + languageName: node + linkType: hard + +"picocolors@npm:^1.1.1": + version: 1.1.1 + resolution: "picocolors@npm:1.1.1" + checksum: 10c0/e2e3e8170ab9d7c7421969adaa7e1b31434f789afb9b3f115f6b96d91945041ac3ceb02e9ec6fe6510ff036bcc0bf91e69a1772edc0b707e12b19c0f2d6bcf58 + languageName: node + linkType: hard + +"picomatch@npm:^2.3.1": + version: 2.3.1 + resolution: "picomatch@npm:2.3.1" + checksum: 
10c0/26c02b8d06f03206fc2ab8d16f19960f2ff9e81a658f831ecb656d8f17d9edc799e8364b1f4a7873e89d9702dff96204be0fa26fe4181f6843f040f819dac4be + languageName: node + linkType: hard + +"picomatch@npm:^4.0.2, picomatch@npm:^4.0.3": + version: 4.0.3 + resolution: "picomatch@npm:4.0.3" + checksum: 10c0/9582c951e95eebee5434f59e426cddd228a7b97a0161a375aed4be244bd3fe8e3a31b846808ea14ef2c8a2527a6eeab7b3946a67d5979e81694654f939473ae2 + languageName: node + linkType: hard + +"pkg-pr-new@npm:^0.0.62": + version: 0.0.62 + resolution: "pkg-pr-new@npm:0.0.62" + dependencies: + "@actions/core": "npm:^1.11.1" + "@jsdevtools/ez-spawn": "npm:^3.0.4" + "@octokit/action": "npm:^6.1.0" + ignore: "npm:^5.3.1" + isbinaryfile: "npm:^5.0.2" + pkg-types: "npm:^1.1.1" + query-registry: "npm:^3.0.1" + tinyglobby: "npm:^0.2.9" + bin: + pkg-pr-new: bin/cli.js + checksum: 10c0/db76e65963800689437ad65d53b8c75b8aaec17c04ca8728af8cc3499a7e690de05e4689c99221bb24b032274bea80b7c1d46c08e5ab308f7dd3b82bdfc644f2 + languageName: node + linkType: hard + +"pkg-types@npm:^1.1.1, pkg-types@npm:^1.3.1": + version: 1.3.1 + resolution: "pkg-types@npm:1.3.1" + dependencies: + confbox: "npm:^0.1.8" + mlly: "npm:^1.7.4" + pathe: "npm:^2.0.1" + checksum: 10c0/19e6cb8b66dcc66c89f2344aecfa47f2431c988cfa3366bdfdcfb1dd6695f87dcce37fbd90fe9d1605e2f4440b77f391e83c23255347c35cf84e7fd774d7fcea + languageName: node + linkType: hard + +"pkg-types@npm:^2.3.0": + version: 2.3.0 + resolution: "pkg-types@npm:2.3.0" + dependencies: + confbox: "npm:^0.2.2" + exsolve: "npm:^1.0.7" + pathe: "npm:^2.0.3" + checksum: 10c0/d2bbddc5b81bd4741e1529c08ef4c5f1542bbdcf63498b73b8e1d84cff71806d1b8b1577800549bb569cb7aa20056257677b979bff48c97967cba7e64f72ae12 + languageName: node + linkType: hard + +"portfinder@npm:^1.0.28": + version: 1.0.38 + resolution: "portfinder@npm:1.0.38" + dependencies: + async: "npm:^3.2.6" + debug: "npm:^4.3.6" + checksum: 
10c0/59b2f2aa0b620c90ce0d477241e62c277f38bfd4fb6074106c23560248dd5e5c2c629dd048ef721f32b19df4213d09b77234880e4f0ab04abf1ab70b6d8048fa + languageName: node + linkType: hard + +"possible-typed-array-names@npm:^1.0.0": + version: 1.1.0 + resolution: "possible-typed-array-names@npm:1.1.0" + checksum: 10c0/c810983414142071da1d644662ce4caebce890203eb2bc7bf119f37f3fe5796226e117e6cca146b521921fa6531072674174a3325066ac66fce089a53e1e5196 + languageName: node + linkType: hard + +"postcss@npm:^8.5.6": + version: 8.5.6 + resolution: "postcss@npm:8.5.6" + dependencies: + nanoid: "npm:^3.3.11" + picocolors: "npm:^1.1.1" + source-map-js: "npm:^1.2.1" + checksum: 10c0/5127cc7c91ed7a133a1b7318012d8bfa112da9ef092dddf369ae699a1f10ebbd89b1b9f25f3228795b84585c72aabd5ced5fc11f2ba467eedf7b081a66fad024 + languageName: node + linkType: hard + +"prelude-ls@npm:^1.2.1": + version: 1.2.1 + resolution: "prelude-ls@npm:1.2.1" + checksum: 10c0/b00d617431e7886c520a6f498a2e14c75ec58f6d93ba48c3b639cf241b54232d90daa05d83a9e9b9fef6baa63cb7e1e4602c2372fea5bc169668401eb127d0cd + languageName: node + linkType: hard + +"prettier@npm:^3.6.2": + version: 3.6.2 + resolution: "prettier@npm:3.6.2" + bin: + prettier: bin/prettier.cjs + checksum: 10c0/488cb2f2b99ec13da1e50074912870217c11edaddedeadc649b1244c749d15ba94e846423d062e2c4c9ae683e2d65f754de28889ba06e697ac4f988d44f45812 + languageName: node + linkType: hard + +"proc-log@npm:^5.0.0": + version: 5.0.0 + resolution: "proc-log@npm:5.0.0" + checksum: 10c0/bbe5edb944b0ad63387a1d5b1911ae93e05ce8d0f60de1035b218cdcceedfe39dbd2c697853355b70f1a090f8f58fe90da487c85216bf9671f9499d1a897e9e3 + languageName: node + linkType: hard + +"promise-retry@npm:^2.0.1": + version: 2.0.1 + resolution: "promise-retry@npm:2.0.1" + dependencies: + err-code: "npm:^2.0.2" + retry: "npm:^0.12.0" + checksum: 10c0/9c7045a1a2928094b5b9b15336dcd2a7b1c052f674550df63cc3f36cd44028e5080448175b6f6ca32b642de81150f5e7b1a98b728f15cb069f2dd60ac2616b96 + languageName: node + linkType: hard + 
+"punycode.js@npm:^2.3.1": + version: 2.3.1 + resolution: "punycode.js@npm:2.3.1" + checksum: 10c0/1d12c1c0e06127fa5db56bd7fdf698daf9a78104456a6b67326877afc21feaa821257b171539caedd2f0524027fa38e67b13dd094159c8d70b6d26d2bea4dfdb + languageName: node + linkType: hard + +"punycode@npm:^2.1.0": + version: 2.3.1 + resolution: "punycode@npm:2.3.1" + checksum: 10c0/14f76a8206bc3464f794fb2e3d3cc665ae416c01893ad7a02b23766eb07159144ee612ad67af5e84fa4479ccfe67678c4feb126b0485651b302babf66f04f9e9 + languageName: node + linkType: hard + +"qs@npm:^6.4.0": + version: 6.14.0 + resolution: "qs@npm:6.14.0" + dependencies: + side-channel: "npm:^1.1.0" + checksum: 10c0/8ea5d91bf34f440598ee389d4a7d95820e3b837d3fd9f433871f7924801becaa0cd3b3b4628d49a7784d06a8aea9bc4554d2b6d8d584e2d221dc06238a42909c + languageName: node + linkType: hard + +"quansync@npm:^0.2.11": + version: 0.2.11 + resolution: "quansync@npm:0.2.11" + checksum: 10c0/cb9a1f8ebce074069f2f6a78578873ffedd9de9f6aa212039b44c0870955c04a71c3b1311b5d97f8ac2f2ec476de202d0a5c01160cb12bc0a11b7ef36d22ef56 + languageName: node + linkType: hard + +"query-registry@npm:^3.0.1": + version: 3.0.1 + resolution: "query-registry@npm:3.0.1" + dependencies: + query-string: "npm:^9.0.0" + quick-lru: "npm:^7.0.0" + url-join: "npm:^5.0.0" + validate-npm-package-name: "npm:^5.0.1" + zod: "npm:^3.23.8" + zod-package-json: "npm:^1.0.3" + checksum: 10c0/a4d36d323cb39c02063fc6592ddbef09571ba5016caf9498bb26ecfca35b141177a11217dcfa5e9958e79ec18d3fe57b9ffdef731691a1225d2994cba4afeb82 + languageName: node + linkType: hard + +"query-string@npm:^9.0.0": + version: 9.3.1 + resolution: "query-string@npm:9.3.1" + dependencies: + decode-uri-component: "npm:^0.4.1" + filter-obj: "npm:^5.1.0" + split-on-first: "npm:^3.0.0" + checksum: 10c0/47f62350be7ace6d2e03d12becf5d1985731d97577963a4d8b684e166f6708cb42f63f09aa4c92931f2ee1ff57c0800a25807b95f28fd98f7064b1d06b02b07e + languageName: node + linkType: hard + +"queue-microtask@npm:^1.2.2": + version: 1.2.3 + 
resolution: "queue-microtask@npm:1.2.3" + checksum: 10c0/900a93d3cdae3acd7d16f642c29a642aea32c2026446151f0778c62ac089d4b8e6c986811076e1ae180a694cedf077d453a11b58ff0a865629a4f82ab558e102 + languageName: node + linkType: hard + +"quick-lru@npm:^7.0.0": + version: 7.3.0 + resolution: "quick-lru@npm:7.3.0" + checksum: 10c0/28dc8eaadcd489d26917f238ad27a6b09f8fe3a609152b0a4f399d5805094e07b56fe1e8c0d7ade0c11463c31bef329803672effc559601daf160a85f578fd05 + languageName: node + linkType: hard + +"randombytes@npm:^2.1.0": + version: 2.1.0 + resolution: "randombytes@npm:2.1.0" + dependencies: + safe-buffer: "npm:^5.1.0" + checksum: 10c0/50395efda7a8c94f5dffab564f9ff89736064d32addf0cc7e8bf5e4166f09f8ded7a0849ca6c2d2a59478f7d90f78f20d8048bca3cdf8be09d8e8a10790388f3 + languageName: node + linkType: hard + +"readdirp@npm:^4.0.1": + version: 4.1.2 + resolution: "readdirp@npm:4.1.2" + checksum: 10c0/60a14f7619dec48c9c850255cd523e2717001b0e179dc7037cfa0895da7b9e9ab07532d324bfb118d73a710887d1e35f79c495fa91582784493e085d18c72c62 + languageName: node + linkType: hard + +"reflect.getprototypeof@npm:^1.0.6, reflect.getprototypeof@npm:^1.0.9": + version: 1.0.10 + resolution: "reflect.getprototypeof@npm:1.0.10" + dependencies: + call-bind: "npm:^1.0.8" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.9" + es-errors: "npm:^1.3.0" + es-object-atoms: "npm:^1.0.0" + get-intrinsic: "npm:^1.2.7" + get-proto: "npm:^1.0.1" + which-builtin-type: "npm:^1.2.1" + checksum: 10c0/7facec28c8008876f8ab98e80b7b9cb4b1e9224353fd4756dda5f2a4ab0d30fa0a5074777c6df24e1e0af463a2697513b0a11e548d99cf52f21f7bc6ba48d3ac + languageName: node + linkType: hard + +"regexp.prototype.flags@npm:^1.5.4": + version: 1.5.4 + resolution: "regexp.prototype.flags@npm:1.5.4" + dependencies: + call-bind: "npm:^1.0.8" + define-properties: "npm:^1.2.1" + es-errors: "npm:^1.3.0" + get-proto: "npm:^1.0.1" + gopd: "npm:^1.2.0" + set-function-name: "npm:^2.0.2" + checksum: 
10c0/83b88e6115b4af1c537f8dabf5c3744032cb875d63bc05c288b1b8c0ef37cbe55353f95d8ca817e8843806e3e150b118bc624e4279b24b4776b4198232735a77 + languageName: node + linkType: hard + +"require-directory@npm:^2.1.1": + version: 2.1.1 + resolution: "require-directory@npm:2.1.1" + checksum: 10c0/83aa76a7bc1531f68d92c75a2ca2f54f1b01463cb566cf3fbc787d0de8be30c9dbc211d1d46be3497dac5785fe296f2dd11d531945ac29730643357978966e99 + languageName: node + linkType: hard + +"require-from-string@npm:^2.0.2": + version: 2.0.2 + resolution: "require-from-string@npm:2.0.2" + checksum: 10c0/aaa267e0c5b022fc5fd4eef49d8285086b15f2a1c54b28240fdf03599cbd9c26049fee3eab894f2e1f6ca65e513b030a7c264201e3f005601e80c49fb2937ce2 + languageName: node + linkType: hard + +"requires-port@npm:^1.0.0": + version: 1.0.0 + resolution: "requires-port@npm:1.0.0" + checksum: 10c0/b2bfdd09db16c082c4326e573a82c0771daaf7b53b9ce8ad60ea46aa6e30aaf475fe9b164800b89f93b748d2c234d8abff945d2551ba47bf5698e04cd7713267 + languageName: node + linkType: hard + +"resolve-from@npm:^4.0.0": + version: 4.0.0 + resolution: "resolve-from@npm:4.0.0" + checksum: 10c0/8408eec31a3112ef96e3746c37be7d64020cda07c03a920f5024e77290a218ea758b26ca9529fd7b1ad283947f34b2291c1c0f6aa0ed34acfdda9c6014c8d190 + languageName: node + linkType: hard + +"resolve-pkg-maps@npm:^1.0.0": + version: 1.0.0 + resolution: "resolve-pkg-maps@npm:1.0.0" + checksum: 10c0/fb8f7bbe2ca281a73b7ef423a1cbc786fb244bd7a95cbe5c3fba25b27d327150beca8ba02f622baea65919a57e061eb5005204daa5f93ed590d9b77463a567ab + languageName: node + linkType: hard + +"resolve@npm:^1.22.4, resolve@npm:~1.22.1, resolve@npm:~1.22.2": + version: 1.22.10 + resolution: "resolve@npm:1.22.10" + dependencies: + is-core-module: "npm:^2.16.0" + path-parse: "npm:^1.0.7" + supports-preserve-symlinks-flag: "npm:^1.0.0" + bin: + resolve: bin/resolve + checksum: 10c0/8967e1f4e2cc40f79b7e080b4582b9a8c5ee36ffb46041dccb20e6461161adf69f843b43067b4a375de926a2cd669157e29a29578191def399dd5ef89a1b5203 + languageName: node 
+ linkType: hard + +"resolve@patch:resolve@npm%3A^1.22.4#optional!builtin, resolve@patch:resolve@npm%3A~1.22.1#optional!builtin, resolve@patch:resolve@npm%3A~1.22.2#optional!builtin": + version: 1.22.10 + resolution: "resolve@patch:resolve@npm%3A1.22.10#optional!builtin::version=1.22.10&hash=c3c19d" + dependencies: + is-core-module: "npm:^2.16.0" + path-parse: "npm:^1.0.7" + supports-preserve-symlinks-flag: "npm:^1.0.0" + bin: + resolve: bin/resolve + checksum: 10c0/52a4e505bbfc7925ac8f4cd91fd8c4e096b6a89728b9f46861d3b405ac9a1ccf4dcbf8befb4e89a2e11370dacd0160918163885cbc669369590f2f31f4c58939 + languageName: node + linkType: hard + +"retry@npm:^0.12.0": + version: 0.12.0 + resolution: "retry@npm:0.12.0" + checksum: 10c0/59933e8501727ba13ad73ef4a04d5280b3717fd650408460c987392efe9d7be2040778ed8ebe933c5cbd63da3dcc37919c141ef8af0a54a6e4fca5a2af177bfe + languageName: node + linkType: hard + +"reusify@npm:^1.0.4": + version: 1.1.0 + resolution: "reusify@npm:1.1.0" + checksum: 10c0/4eff0d4a5f9383566c7d7ec437b671cc51b25963bd61bf127c3f3d3f68e44a026d99b8d2f1ad344afff8d278a8fe70a8ea092650a716d22287e8bef7126bb2fa + languageName: node + linkType: hard + +"rimraf@npm:^6.0.1": + version: 6.0.1 + resolution: "rimraf@npm:6.0.1" + dependencies: + glob: "npm:^11.0.0" + package-json-from-dist: "npm:^1.0.0" + bin: + rimraf: dist/esm/bin.mjs + checksum: 10c0/b30b6b072771f0d1e73b4ca5f37bb2944ee09375be9db5f558fcd3310000d29dfcfa93cf7734d75295ad5a7486dc8e40f63089ced1722a664539ffc0c3ece8c6 + languageName: node + linkType: hard + +"rollup@npm:^4.43.0": + version: 4.52.4 + resolution: "rollup@npm:4.52.4" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.52.4" + "@rollup/rollup-android-arm64": "npm:4.52.4" + "@rollup/rollup-darwin-arm64": "npm:4.52.4" + "@rollup/rollup-darwin-x64": "npm:4.52.4" + "@rollup/rollup-freebsd-arm64": "npm:4.52.4" + "@rollup/rollup-freebsd-x64": "npm:4.52.4" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.52.4" + "@rollup/rollup-linux-arm-musleabihf": 
"npm:4.52.4" + "@rollup/rollup-linux-arm64-gnu": "npm:4.52.4" + "@rollup/rollup-linux-arm64-musl": "npm:4.52.4" + "@rollup/rollup-linux-loong64-gnu": "npm:4.52.4" + "@rollup/rollup-linux-ppc64-gnu": "npm:4.52.4" + "@rollup/rollup-linux-riscv64-gnu": "npm:4.52.4" + "@rollup/rollup-linux-riscv64-musl": "npm:4.52.4" + "@rollup/rollup-linux-s390x-gnu": "npm:4.52.4" + "@rollup/rollup-linux-x64-gnu": "npm:4.52.4" + "@rollup/rollup-linux-x64-musl": "npm:4.52.4" + "@rollup/rollup-openharmony-arm64": "npm:4.52.4" + "@rollup/rollup-win32-arm64-msvc": "npm:4.52.4" + "@rollup/rollup-win32-ia32-msvc": "npm:4.52.4" + "@rollup/rollup-win32-x64-gnu": "npm:4.52.4" + "@rollup/rollup-win32-x64-msvc": "npm:4.52.4" + "@types/estree": "npm:1.0.8" + fsevents: "npm:~2.3.2" + dependenciesMeta: + "@rollup/rollup-android-arm-eabi": + optional: true + "@rollup/rollup-android-arm64": + optional: true + "@rollup/rollup-darwin-arm64": + optional: true + "@rollup/rollup-darwin-x64": + optional: true + "@rollup/rollup-freebsd-arm64": + optional: true + "@rollup/rollup-freebsd-x64": + optional: true + "@rollup/rollup-linux-arm-gnueabihf": + optional: true + "@rollup/rollup-linux-arm-musleabihf": + optional: true + "@rollup/rollup-linux-arm64-gnu": + optional: true + "@rollup/rollup-linux-arm64-musl": + optional: true + "@rollup/rollup-linux-loong64-gnu": + optional: true + "@rollup/rollup-linux-ppc64-gnu": + optional: true + "@rollup/rollup-linux-riscv64-gnu": + optional: true + "@rollup/rollup-linux-riscv64-musl": + optional: true + "@rollup/rollup-linux-s390x-gnu": + optional: true + "@rollup/rollup-linux-x64-gnu": + optional: true + "@rollup/rollup-linux-x64-musl": + optional: true + "@rollup/rollup-openharmony-arm64": + optional: true + "@rollup/rollup-win32-arm64-msvc": + optional: true + "@rollup/rollup-win32-ia32-msvc": + optional: true + "@rollup/rollup-win32-x64-gnu": + optional: true + "@rollup/rollup-win32-x64-msvc": + optional: true + fsevents: + optional: true + bin: + rollup: 
dist/bin/rollup + checksum: 10c0/aaec0f57e887d4fb37d152f93cf7133954eec79d11643e95de768ec9a377f08793b1745c648ca65a0dcc6c795c4d9ca398724d013e5745de270e88a543782aea + languageName: node + linkType: hard + +"run-parallel@npm:^1.1.9": + version: 1.2.0 + resolution: "run-parallel@npm:1.2.0" + dependencies: + queue-microtask: "npm:^1.2.2" + checksum: 10c0/200b5ab25b5b8b7113f9901bfe3afc347e19bb7475b267d55ad0eb86a62a46d77510cb0f232507c9e5d497ebda569a08a9867d0d14f57a82ad5564d991588b39 + languageName: node + linkType: hard + +"rxjs@npm:7.8.2": + version: 7.8.2 + resolution: "rxjs@npm:7.8.2" + dependencies: + tslib: "npm:^2.1.0" + checksum: 10c0/1fcd33d2066ada98ba8f21fcbbcaee9f0b271de1d38dc7f4e256bfbc6ffcdde68c8bfb69093de7eeb46f24b1fb820620bf0223706cff26b4ab99a7ff7b2e2c45 + languageName: node + linkType: hard + +"safe-array-concat@npm:^1.1.3": + version: 1.1.3 + resolution: "safe-array-concat@npm:1.1.3" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.2" + get-intrinsic: "npm:^1.2.6" + has-symbols: "npm:^1.1.0" + isarray: "npm:^2.0.5" + checksum: 10c0/43c86ffdddc461fb17ff8a17c5324f392f4868f3c7dd2c6a5d9f5971713bc5fd755667212c80eab9567595f9a7509cc2f83e590ddaebd1bd19b780f9c79f9a8d + languageName: node + linkType: hard + +"safe-buffer@npm:5.1.2": + version: 5.1.2 + resolution: "safe-buffer@npm:5.1.2" + checksum: 10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21 + languageName: node + linkType: hard + +"safe-buffer@npm:^5.1.0": + version: 5.2.1 + resolution: "safe-buffer@npm:5.2.1" + checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 + languageName: node + linkType: hard + +"safe-push-apply@npm:^1.0.0": + version: 1.0.0 + resolution: "safe-push-apply@npm:1.0.0" + dependencies: + es-errors: "npm:^1.3.0" + isarray: "npm:^2.0.5" + checksum: 
10c0/831f1c9aae7436429e7862c7e46f847dfe490afac20d0ee61bae06108dbf5c745a0de3568ada30ccdd3eeb0864ca8331b2eef703abd69bfea0745b21fd320750 + languageName: node + linkType: hard + +"safe-regex-test@npm:^1.1.0": + version: 1.1.0 + resolution: "safe-regex-test@npm:1.1.0" + dependencies: + call-bound: "npm:^1.0.2" + es-errors: "npm:^1.3.0" + is-regex: "npm:^1.2.1" + checksum: 10c0/f2c25281bbe5d39cddbbce7f86fca5ea9b3ce3354ea6cd7c81c31b006a5a9fff4286acc5450a3b9122c56c33eba69c56b9131ad751457b2b4a585825e6a10665 + languageName: node + linkType: hard + +"safer-buffer@npm:>= 2.1.2 < 3.0.0": + version: 2.1.2 + resolution: "safer-buffer@npm:2.1.2" + checksum: 10c0/7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4 + languageName: node + linkType: hard + +"secure-compare@npm:3.0.1": + version: 3.0.1 + resolution: "secure-compare@npm:3.0.1" + checksum: 10c0/af3102f3f555d917c8ffff7a5f6f00f70195708f4faf82d48794485c9f3cb365cee0dd4da6b4e53e8964f172970bce6069b6101ba3ce8c309bff54f460d1f650 + languageName: node + linkType: hard + +"semver@npm:^6.3.1": + version: 6.3.1 + resolution: "semver@npm:6.3.1" + bin: + semver: bin/semver.js + checksum: 10c0/e3d79b609071caa78bcb6ce2ad81c7966a46a7431d9d58b8800cfa9cb6a63699b3899a0e4bcce36167a284578212d9ae6942b6929ba4aa5015c079a67751d42d + languageName: node + linkType: hard + +"semver@npm:^7.3.5": + version: 7.7.2 + resolution: "semver@npm:7.7.2" + bin: + semver: bin/semver.js + checksum: 10c0/aca305edfbf2383c22571cb7714f48cadc7ac95371b4b52362fb8eeffdfbc0de0669368b82b2b15978f8848f01d7114da65697e56cd8c37b0dab8c58e543f9ea + languageName: node + linkType: hard + +"semver@npm:^7.5.3": + version: 7.7.4 + resolution: "semver@npm:7.7.4" + bin: + semver: bin/semver.js + checksum: 10c0/5215ad0234e2845d4ea5bb9d836d42b03499546ddafb12075566899fc617f68794bb6f146076b6881d755de17d6c6cc73372555879ec7dce2c2feee947866ad2 + languageName: node + linkType: hard + +"semver@npm:^7.6.0": + version: 
7.7.3 + resolution: "semver@npm:7.7.3" + bin: + semver: bin/semver.js + checksum: 10c0/4afe5c986567db82f44c8c6faef8fe9df2a9b1d98098fc1721f57c696c4c21cebd572f297fc21002f81889492345b8470473bc6f4aff5fb032a6ea59ea2bc45e + languageName: node + linkType: hard + +"semver@npm:~7.5.4": + version: 7.5.4 + resolution: "semver@npm:7.5.4" + dependencies: + lru-cache: "npm:^6.0.0" + bin: + semver: bin/semver.js + checksum: 10c0/5160b06975a38b11c1ab55950cb5b8a23db78df88275d3d8a42ccf1f29e55112ac995b3a26a522c36e3b5f76b0445f1eef70d696b8c7862a2b4303d7b0e7609e + languageName: node + linkType: hard + +"serialize-javascript@npm:^6.0.2": + version: 6.0.2 + resolution: "serialize-javascript@npm:6.0.2" + dependencies: + randombytes: "npm:^2.1.0" + checksum: 10c0/2dd09ef4b65a1289ba24a788b1423a035581bef60817bea1f01eda8e3bda623f86357665fe7ac1b50f6d4f583f97db9615b3f07b2a2e8cbcb75033965f771dd2 + languageName: node + linkType: hard + +"set-function-length@npm:^1.2.2": + version: 1.2.2 + resolution: "set-function-length@npm:1.2.2" + dependencies: + define-data-property: "npm:^1.1.4" + es-errors: "npm:^1.3.0" + function-bind: "npm:^1.1.2" + get-intrinsic: "npm:^1.2.4" + gopd: "npm:^1.0.1" + has-property-descriptors: "npm:^1.0.2" + checksum: 10c0/82850e62f412a258b71e123d4ed3873fa9377c216809551192bb6769329340176f109c2eeae8c22a8d386c76739855f78e8716515c818bcaef384b51110f0f3c + languageName: node + linkType: hard + +"set-function-name@npm:^2.0.2": + version: 2.0.2 + resolution: "set-function-name@npm:2.0.2" + dependencies: + define-data-property: "npm:^1.1.4" + es-errors: "npm:^1.3.0" + functions-have-names: "npm:^1.2.3" + has-property-descriptors: "npm:^1.0.2" + checksum: 10c0/fce59f90696c450a8523e754abb305e2b8c73586452619c2bad5f7bf38c7b6b4651895c9db895679c5bef9554339cf3ef1c329b66ece3eda7255785fbe299316 + languageName: node + linkType: hard + +"set-proto@npm:^1.0.0": + version: 1.0.0 + resolution: "set-proto@npm:1.0.0" + dependencies: + dunder-proto: "npm:^1.0.1" + es-errors: "npm:^1.3.0" + 
es-object-atoms: "npm:^1.0.0" + checksum: 10c0/ca5c3ccbba479d07c30460e367e66337cec825560b11e8ba9c5ebe13a2a0d6021ae34eddf94ff3dfe17a3104dc1f191519cb6c48378b503e5c3f36393938776a + languageName: node + linkType: hard + +"shebang-command@npm:^2.0.0": + version: 2.0.0 + resolution: "shebang-command@npm:2.0.0" + dependencies: + shebang-regex: "npm:^3.0.0" + checksum: 10c0/a41692e7d89a553ef21d324a5cceb5f686d1f3c040759c50aab69688634688c5c327f26f3ecf7001ebfd78c01f3c7c0a11a7c8bfd0a8bc9f6240d4f40b224e4e + languageName: node + linkType: hard + +"shebang-regex@npm:^3.0.0": + version: 3.0.0 + resolution: "shebang-regex@npm:3.0.0" + checksum: 10c0/1dbed0726dd0e1152a92696c76c7f06084eb32a90f0528d11acd764043aacf76994b2fb30aa1291a21bd019d6699164d048286309a278855ee7bec06cf6fb690 + languageName: node + linkType: hard + +"shell-quote@npm:1.8.3": + version: 1.8.3 + resolution: "shell-quote@npm:1.8.3" + checksum: 10c0/bee87c34e1e986cfb4c30846b8e6327d18874f10b535699866f368ade11ea4ee45433d97bf5eada22c4320c27df79c3a6a7eb1bf3ecfc47f2c997d9e5e2672fd + languageName: node + linkType: hard + +"side-channel-list@npm:^1.0.0": + version: 1.0.0 + resolution: "side-channel-list@npm:1.0.0" + dependencies: + es-errors: "npm:^1.3.0" + object-inspect: "npm:^1.13.3" + checksum: 10c0/644f4ac893456c9490ff388bf78aea9d333d5e5bfc64cfb84be8f04bf31ddc111a8d4b83b85d7e7e8a7b845bc185a9ad02c052d20e086983cf59f0be517d9b3d + languageName: node + linkType: hard + +"side-channel-map@npm:^1.0.1": + version: 1.0.1 + resolution: "side-channel-map@npm:1.0.1" + dependencies: + call-bound: "npm:^1.0.2" + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.5" + object-inspect: "npm:^1.13.3" + checksum: 10c0/010584e6444dd8a20b85bc926d934424bd809e1a3af941cace229f7fdcb751aada0fb7164f60c2e22292b7fa3c0ff0bce237081fd4cdbc80de1dc68e95430672 + languageName: node + linkType: hard + +"side-channel-weakmap@npm:^1.0.2": + version: 1.0.2 + resolution: "side-channel-weakmap@npm:1.0.2" + dependencies: + call-bound: "npm:^1.0.2" + es-errors: 
"npm:^1.3.0" + get-intrinsic: "npm:^1.2.5" + object-inspect: "npm:^1.13.3" + side-channel-map: "npm:^1.0.1" + checksum: 10c0/71362709ac233e08807ccd980101c3e2d7efe849edc51455030327b059f6c4d292c237f94dc0685031dd11c07dd17a68afde235d6cf2102d949567f98ab58185 + languageName: node + linkType: hard + +"side-channel@npm:^1.1.0": + version: 1.1.0 + resolution: "side-channel@npm:1.1.0" + dependencies: + es-errors: "npm:^1.3.0" + object-inspect: "npm:^1.13.3" + side-channel-list: "npm:^1.0.0" + side-channel-map: "npm:^1.0.1" + side-channel-weakmap: "npm:^1.0.2" + checksum: 10c0/cb20dad41eb032e6c24c0982e1e5a24963a28aa6122b4f05b3f3d6bf8ae7fd5474ef382c8f54a6a3ab86e0cac4d41a23bd64ede3970e5bfb50326ba02a7996e6 + languageName: node + linkType: hard + +"signal-exit@npm:^4.0.1": + version: 4.1.0 + resolution: "signal-exit@npm:4.1.0" + checksum: 10c0/41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83 + languageName: node + linkType: hard + +"smart-buffer@npm:^4.2.0": + version: 4.2.0 + resolution: "smart-buffer@npm:4.2.0" + checksum: 10c0/a16775323e1404dd43fabafe7460be13a471e021637bc7889468eb45ce6a6b207261f454e4e530a19500cc962c4cc5348583520843b363f4193cee5c00e1e539 + languageName: node + linkType: hard + +"socks-proxy-agent@npm:^8.0.3": + version: 8.0.5 + resolution: "socks-proxy-agent@npm:8.0.5" + dependencies: + agent-base: "npm:^7.1.2" + debug: "npm:^4.3.4" + socks: "npm:^2.8.3" + checksum: 10c0/5d2c6cecba6821389aabf18728325730504bf9bb1d9e342e7987a5d13badd7a98838cc9a55b8ed3cb866ad37cc23e1086f09c4d72d93105ce9dfe76330e9d2a6 + languageName: node + linkType: hard + +"socks@npm:^2.8.3": + version: 2.8.7 + resolution: "socks@npm:2.8.7" + dependencies: + ip-address: "npm:^10.0.1" + smart-buffer: "npm:^4.2.0" + checksum: 10c0/2805a43a1c4bcf9ebf6e018268d87b32b32b06fbbc1f9282573583acc155860dc361500f89c73bfbb157caa1b4ac78059eac0ef15d1811eb0ca75e0bdadbc9d2 + languageName: node + linkType: hard + 
+"source-map-js@npm:^1.2.1": + version: 1.2.1 + resolution: "source-map-js@npm:1.2.1" + checksum: 10c0/7bda1fc4c197e3c6ff17de1b8b2c20e60af81b63a52cb32ec5a5d67a20a7d42651e2cb34ebe93833c5a2a084377e17455854fee3e21e7925c64a51b6a52b0faf + languageName: node + linkType: hard + +"source-map@npm:~0.6.1": + version: 0.6.1 + resolution: "source-map@npm:0.6.1" + checksum: 10c0/ab55398007c5e5532957cb0beee2368529618ac0ab372d789806f5718123cc4367d57de3904b4e6a4170eb5a0b0f41373066d02ca0735a0c4d75c7d328d3e011 + languageName: node + linkType: hard + +"split-on-first@npm:^3.0.0": + version: 3.0.0 + resolution: "split-on-first@npm:3.0.0" + checksum: 10c0/a1262eae12b68de235e1a08e011bf5b42c42621985ddf807e6221fb1e2b3304824913ae7019f18436b96b8fab8aef5f1ad80dedd2385317fdc51b521c3882cd0 + languageName: node + linkType: hard + +"sprintf-js@npm:~1.0.2": + version: 1.0.3 + resolution: "sprintf-js@npm:1.0.3" + checksum: 10c0/ecadcfe4c771890140da5023d43e190b7566d9cf8b2d238600f31bec0fc653f328da4450eb04bd59a431771a8e9cc0e118f0aa3974b683a4981b4e07abc2a5bb + languageName: node + linkType: hard + +"ssri@npm:^12.0.0": + version: 12.0.0 + resolution: "ssri@npm:12.0.0" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/caddd5f544b2006e88fa6b0124d8d7b28208b83c72d7672d5ade44d794525d23b540f3396108c4eb9280dcb7c01f0bef50682f5b4b2c34291f7c5e211fd1417d + languageName: node + linkType: hard + +"stop-iteration-iterator@npm:^1.1.0": + version: 1.1.0 + resolution: "stop-iteration-iterator@npm:1.1.0" + dependencies: + es-errors: "npm:^1.3.0" + internal-slot: "npm:^1.1.0" + checksum: 10c0/de4e45706bb4c0354a4b1122a2b8cc45a639e86206807ce0baf390ee9218d3ef181923fa4d2b67443367c491aa255c5fbaa64bb74648e3c5b48299928af86c09 + languageName: node + linkType: hard + +"string-argv@npm:^0.3.1, string-argv@npm:~0.3.1": + version: 0.3.2 + resolution: "string-argv@npm:0.3.2" + checksum: 10c0/75c02a83759ad1722e040b86823909d9a2fc75d15dd71ec4b537c3560746e33b5f5a07f7332d1e3f88319909f82190843aa2f0a0d8c8d591ec08e93d5b8dec82 + 
languageName: node + linkType: hard + +"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": + version: 4.2.3 + resolution: "string-width@npm:4.2.3" + dependencies: + emoji-regex: "npm:^8.0.0" + is-fullwidth-code-point: "npm:^3.0.0" + strip-ansi: "npm:^6.0.1" + checksum: 10c0/1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b + languageName: node + linkType: hard + +"string-width@npm:^5.0.1, string-width@npm:^5.1.2": + version: 5.1.2 + resolution: "string-width@npm:5.1.2" + dependencies: + eastasianwidth: "npm:^0.2.0" + emoji-regex: "npm:^9.2.2" + strip-ansi: "npm:^7.0.1" + checksum: 10c0/ab9c4264443d35b8b923cbdd513a089a60de339216d3b0ed3be3ba57d6880e1a192b70ae17225f764d7adbf5994e9bb8df253a944736c15a0240eff553c678ca + languageName: node + linkType: hard + +"string.prototype.trim@npm:^1.2.10": + version: 1.2.10 + resolution: "string.prototype.trim@npm:1.2.10" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.2" + define-data-property: "npm:^1.1.4" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.5" + es-object-atoms: "npm:^1.0.0" + has-property-descriptors: "npm:^1.0.2" + checksum: 10c0/8a8854241c4b54a948e992eb7dd6b8b3a97185112deb0037a134f5ba57541d8248dd610c966311887b6c2fd1181a3877bffb14d873ce937a344535dabcc648f8 + languageName: node + linkType: hard + +"string.prototype.trimend@npm:^1.0.9": + version: 1.0.9 + resolution: "string.prototype.trimend@npm:1.0.9" + dependencies: + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.2" + define-properties: "npm:^1.2.1" + es-object-atoms: "npm:^1.0.0" + checksum: 10c0/59e1a70bf9414cb4c536a6e31bef5553c8ceb0cf44d8b4d0ed65c9653358d1c64dd0ec203b100df83d0413bbcde38b8c5d49e14bc4b86737d74adc593a0d35b6 + languageName: node + linkType: hard + +"string.prototype.trimstart@npm:^1.0.8": + version: 1.0.8 + resolution: "string.prototype.trimstart@npm:1.0.8" + dependencies: + 
call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-object-atoms: "npm:^1.0.0" + checksum: 10c0/d53af1899959e53c83b64a5fd120be93e067da740e7e75acb433849aa640782fb6c7d4cd5b84c954c84413745a3764df135a8afeb22908b86a835290788d8366 + languageName: node + linkType: hard + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1, strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": + version: 6.0.1 + resolution: "strip-ansi@npm:6.0.1" + dependencies: + ansi-regex: "npm:^5.0.1" + checksum: 10c0/1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952 + languageName: node + linkType: hard + +"strip-ansi@npm:^7.0.1": + version: 7.1.2 + resolution: "strip-ansi@npm:7.1.2" + dependencies: + ansi-regex: "npm:^6.0.1" + checksum: 10c0/0d6d7a023de33368fd042aab0bf48f4f4077abdfd60e5393e73c7c411e85e1b3a83507c11af2e656188511475776215df9ca589b4da2295c9455cc399ce1858b + languageName: node + linkType: hard + +"strip-bom@npm:^3.0.0": + version: 3.0.0 + resolution: "strip-bom@npm:3.0.0" + checksum: 10c0/51201f50e021ef16672593d7434ca239441b7b760e905d9f33df6e4f3954ff54ec0e0a06f100d028af0982d6f25c35cd5cda2ce34eaebccd0250b8befb90d8f1 + languageName: node + linkType: hard + +"strip-json-comments@npm:^3.1.1, strip-json-comments@npm:~3.1.1": + version: 3.1.1 + resolution: "strip-json-comments@npm:3.1.1" + checksum: 10c0/9681a6257b925a7fa0f285851c0e613cc934a50661fa7bb41ca9cbbff89686bb4a0ee366e6ecedc4daafd01e83eee0720111ab294366fe7c185e935475ebcecd + languageName: node + linkType: hard + +"supports-color@npm:8.1.1, supports-color@npm:^8.1.1, supports-color@npm:~8.1.1": + version: 8.1.1 + resolution: "supports-color@npm:8.1.1" + dependencies: + has-flag: "npm:^4.0.0" + checksum: 10c0/ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89 + languageName: node + linkType: hard + +"supports-color@npm:^7.1.0": + version: 7.2.0 + resolution: "supports-color@npm:7.2.0" + 
dependencies: + has-flag: "npm:^4.0.0" + checksum: 10c0/afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124 + languageName: node + linkType: hard + +"supports-preserve-symlinks-flag@npm:^1.0.0": + version: 1.0.0 + resolution: "supports-preserve-symlinks-flag@npm:1.0.0" + checksum: 10c0/6c4032340701a9950865f7ae8ef38578d8d7053f5e10518076e6554a9381fa91bd9c6850193695c141f32b21f979c985db07265a758867bac95de05f7d8aeb39 + languageName: node + linkType: hard + +"tar@npm:^7.4.3": + version: 7.5.1 + resolution: "tar@npm:7.5.1" + dependencies: + "@isaacs/fs-minipass": "npm:^4.0.0" + chownr: "npm:^3.0.0" + minipass: "npm:^7.1.2" + minizlib: "npm:^3.1.0" + yallist: "npm:^5.0.0" + checksum: 10c0/0dad0596a61586180981133b20c32cfd93c5863c5b7140d646714e6ea8ec84583b879e5dc3928a4d683be6e6109ad7ea3de1cf71986d5194f81b3a016c8858c9 + languageName: node + linkType: hard + +"test-exclude@npm:^8.0.0": + version: 8.0.0 + resolution: "test-exclude@npm:8.0.0" + dependencies: + "@istanbuljs/schema": "npm:^0.1.2" + glob: "npm:^13.0.6" + minimatch: "npm:^10.2.2" + checksum: 10c0/f2b613cb5ddc05d1357892f5da965a6f7af42b19a6b2fc30c9b93cb74adf5059a3a9f29818adb75c96c1747b3934caac90a9058f73ce0640ea101de828a11600 + languageName: node + linkType: hard + +"tinyglobby@npm:^0.2.12, tinyglobby@npm:^0.2.15, tinyglobby@npm:^0.2.9": + version: 0.2.15 + resolution: "tinyglobby@npm:0.2.15" + dependencies: + fdir: "npm:^6.5.0" + picomatch: "npm:^4.0.3" + checksum: 10c0/869c31490d0d88eedb8305d178d4c75e7463e820df5a9b9d388291daf93e8b1eb5de1dad1c1e139767e4269fe75f3b10d5009b2cc14db96ff98986920a186844 + languageName: node + linkType: hard + +"to-regex-range@npm:^5.0.1": + version: 5.0.1 + resolution: "to-regex-range@npm:5.0.1" + dependencies: + is-number: "npm:^7.0.0" + checksum: 10c0/487988b0a19c654ff3e1961b87f471702e708fa8a8dd02a298ef16da7206692e8552a0250e8b3e8759270f62e9d8314616f6da274734d3b558b1fc7b7724e892 + languageName: node + linkType: 
hard + +"torch@workspace:.": + version: 0.0.0-use.local + resolution: "torch@workspace:." + dependencies: + "@eslint/js": "npm:^9.37.0" + "@types/mocha": "npm:^10.0.10" + "@types/node": "npm:^24.6.2" + c8: "npm:^11.0.0" + chai: "npm:^6.2.0" + concurrently: "npm:^9.2.1" + eslint: "npm:^9.37.0" + eslint-plugin-import: "npm:^2.32.0" + http-server: "npm:^14.1.1" + mocha: "npm:^11.7.4" + pkg-pr-new: "npm:^0.0.62" + prettier: "npm:^3.6.2" + rimraf: "npm:^6.0.1" + ts-node: "npm:^10.9.2" + tsx: "npm:^4.20.6" + typedoc: "npm:^0.28.14" + typescript: "npm:^5.9.3" + typescript-eslint: "npm:^8.46.0" + vite: "npm:^7.1.9" + vite-plugin-dts: "npm:^4.5.4" + languageName: unknown + linkType: soft + +"tree-kill@npm:1.2.2": + version: 1.2.2 + resolution: "tree-kill@npm:1.2.2" + bin: + tree-kill: cli.js + checksum: 10c0/7b1b7c7f17608a8f8d20a162e7957ac1ef6cd1636db1aba92f4e072dc31818c2ff0efac1e3d91064ede67ed5dc57c565420531a8134090a12ac10cf792ab14d2 + languageName: node + linkType: hard + +"ts-api-utils@npm:^2.1.0": + version: 2.1.0 + resolution: "ts-api-utils@npm:2.1.0" + peerDependencies: + typescript: ">=4.8.4" + checksum: 10c0/9806a38adea2db0f6aa217ccc6bc9c391ddba338a9fe3080676d0d50ed806d305bb90e8cef0276e793d28c8a929f400abb184ddd7ff83a416959c0f4d2ce754f + languageName: node + linkType: hard + +"ts-node@npm:^10.9.2": + version: 10.9.2 + resolution: "ts-node@npm:10.9.2" + dependencies: + "@cspotcode/source-map-support": "npm:^0.8.0" + "@tsconfig/node10": "npm:^1.0.7" + "@tsconfig/node12": "npm:^1.0.7" + "@tsconfig/node14": "npm:^1.0.0" + "@tsconfig/node16": "npm:^1.0.2" + acorn: "npm:^8.4.1" + acorn-walk: "npm:^8.1.1" + arg: "npm:^4.1.0" + create-require: "npm:^1.1.0" + diff: "npm:^4.0.1" + make-error: "npm:^1.1.1" + v8-compile-cache-lib: "npm:^3.0.1" + yn: "npm:3.1.1" + peerDependencies: + "@swc/core": ">=1.2.50" + "@swc/wasm": ">=1.2.50" + "@types/node": "*" + typescript: ">=2.7" + peerDependenciesMeta: + "@swc/core": + optional: true + "@swc/wasm": + optional: true + bin: + ts-node: 
dist/bin.js + ts-node-cwd: dist/bin-cwd.js + ts-node-esm: dist/bin-esm.js + ts-node-script: dist/bin-script.js + ts-node-transpile-only: dist/bin-transpile.js + ts-script: dist/bin-script-deprecated.js + checksum: 10c0/5f29938489f96982a25ba650b64218e83a3357d76f7bede80195c65ab44ad279c8357264639b7abdd5d7e75fc269a83daa0e9c62fd8637a3def67254ecc9ddc2 + languageName: node + linkType: hard + +"tsconfig-paths@npm:^3.15.0": + version: 3.15.0 + resolution: "tsconfig-paths@npm:3.15.0" + dependencies: + "@types/json5": "npm:^0.0.29" + json5: "npm:^1.0.2" + minimist: "npm:^1.2.6" + strip-bom: "npm:^3.0.0" + checksum: 10c0/5b4f301a2b7a3766a986baf8fc0e177eb80bdba6e396792ff92dc23b5bca8bb279fc96517dcaaef63a3b49bebc6c4c833653ec58155780bc906bdbcf7dda0ef5 + languageName: node + linkType: hard + +"tslib@npm:^2.1.0": + version: 2.8.1 + resolution: "tslib@npm:2.8.1" + checksum: 10c0/9c4759110a19c53f992d9aae23aac5ced636e99887b51b9e61def52611732872ff7668757d4e4c61f19691e36f4da981cd9485e869b4a7408d689f6bf1f14e62 + languageName: node + linkType: hard + +"tsx@npm:^4.20.6": + version: 4.20.6 + resolution: "tsx@npm:4.20.6" + dependencies: + esbuild: "npm:~0.25.0" + fsevents: "npm:~2.3.3" + get-tsconfig: "npm:^4.7.5" + dependenciesMeta: + fsevents: + optional: true + bin: + tsx: dist/cli.mjs + checksum: 10c0/07757a9bf62c271e0a00869b2008c5f2d6e648766536e4faf27d9d8027b7cde1ac8e4871f4bb570c99388bcee0018e6869dad98c07df809b8052f9c549cd216f + languageName: node + linkType: hard + +"tunnel@npm:^0.0.6": + version: 0.0.6 + resolution: "tunnel@npm:0.0.6" + checksum: 10c0/e27e7e896f2426c1c747325b5f54efebc1a004647d853fad892b46d64e37591ccd0b97439470795e5262b5c0748d22beb4489a04a0a448029636670bfd801b75 + languageName: node + linkType: hard + +"type-check@npm:^0.4.0, type-check@npm:~0.4.0": + version: 0.4.0 + resolution: "type-check@npm:0.4.0" + dependencies: + prelude-ls: "npm:^1.2.1" + checksum: 
10c0/7b3fd0ed43891e2080bf0c5c504b418fbb3e5c7b9708d3d015037ba2e6323a28152ec163bcb65212741fa5d2022e3075ac3c76440dbd344c9035f818e8ecee58 + languageName: node + linkType: hard + +"type-detect@npm:^4.0.8": + version: 4.1.0 + resolution: "type-detect@npm:4.1.0" + checksum: 10c0/df8157ca3f5d311edc22885abc134e18ff8ffbc93d6a9848af5b682730ca6a5a44499259750197250479c5331a8a75b5537529df5ec410622041650a7f293e2a + languageName: node + linkType: hard + +"typed-array-buffer@npm:^1.0.3": + version: 1.0.3 + resolution: "typed-array-buffer@npm:1.0.3" + dependencies: + call-bound: "npm:^1.0.3" + es-errors: "npm:^1.3.0" + is-typed-array: "npm:^1.1.14" + checksum: 10c0/1105071756eb248774bc71646bfe45b682efcad93b55532c6ffa4518969fb6241354e4aa62af679ae83899ec296d69ef88f1f3763657cdb3a4d29321f7b83079 + languageName: node + linkType: hard + +"typed-array-byte-length@npm:^1.0.3": + version: 1.0.3 + resolution: "typed-array-byte-length@npm:1.0.3" + dependencies: + call-bind: "npm:^1.0.8" + for-each: "npm:^0.3.3" + gopd: "npm:^1.2.0" + has-proto: "npm:^1.2.0" + is-typed-array: "npm:^1.1.14" + checksum: 10c0/6ae083c6f0354f1fce18b90b243343b9982affd8d839c57bbd2c174a5d5dc71be9eb7019ffd12628a96a4815e7afa85d718d6f1e758615151d5f35df841ffb3e + languageName: node + linkType: hard + +"typed-array-byte-offset@npm:^1.0.4": + version: 1.0.4 + resolution: "typed-array-byte-offset@npm:1.0.4" + dependencies: + available-typed-arrays: "npm:^1.0.7" + call-bind: "npm:^1.0.8" + for-each: "npm:^0.3.3" + gopd: "npm:^1.2.0" + has-proto: "npm:^1.2.0" + is-typed-array: "npm:^1.1.15" + reflect.getprototypeof: "npm:^1.0.9" + checksum: 10c0/3d805b050c0c33b51719ee52de17c1cd8e6a571abdf0fffb110e45e8dd87a657e8b56eee94b776b13006d3d347a0c18a730b903cf05293ab6d92e99ff8f77e53 + languageName: node + linkType: hard + +"typed-array-length@npm:^1.0.7": + version: 1.0.7 + resolution: "typed-array-length@npm:1.0.7" + dependencies: + call-bind: "npm:^1.0.7" + for-each: "npm:^0.3.3" + gopd: "npm:^1.0.1" + is-typed-array: "npm:^1.1.13" + 
possible-typed-array-names: "npm:^1.0.0" + reflect.getprototypeof: "npm:^1.0.6" + checksum: 10c0/e38f2ae3779584c138a2d8adfa8ecf749f494af3cd3cdafe4e688ce51418c7d2c5c88df1bd6be2bbea099c3f7cea58c02ca02ed438119e91f162a9de23f61295 + languageName: node + linkType: hard + +"typedoc@npm:^0.28.14": + version: 0.28.14 + resolution: "typedoc@npm:0.28.14" + dependencies: + "@gerrit0/mini-shiki": "npm:^3.12.0" + lunr: "npm:^2.3.9" + markdown-it: "npm:^14.1.0" + minimatch: "npm:^9.0.5" + yaml: "npm:^2.8.1" + peerDependencies: + typescript: 5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x || 5.8.x || 5.9.x + bin: + typedoc: bin/typedoc + checksum: 10c0/a8727134991ba3f9a982e9f6ceecfbcf0fac531e4865e4865cdee68ea6fe1a594228b8654011d38ffa2332b7e84e4eaa3d0dac04a8bdf36a0686d1c3f327e80b + languageName: node + linkType: hard + +"typescript-eslint@npm:^8.46.0": + version: 8.46.0 + resolution: "typescript-eslint@npm:8.46.0" + dependencies: + "@typescript-eslint/eslint-plugin": "npm:8.46.0" + "@typescript-eslint/parser": "npm:8.46.0" + "@typescript-eslint/typescript-estree": "npm:8.46.0" + "@typescript-eslint/utils": "npm:8.46.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: ">=4.8.4 <6.0.0" + checksum: 10c0/fcfd2bbf2932734fced5515b3697e0f8e69ba2aca40625d12f4e0225396407e816aa9085ff4886e8f79903110b02676deeaef2525d60700a4f5d47841a27282a + languageName: node + linkType: hard + +"typescript@npm:5.8.2": + version: 5.8.2 + resolution: "typescript@npm:5.8.2" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 10c0/5c4f6fbf1c6389b6928fe7b8fcd5dc73bb2d58cd4e3883f1d774ed5bd83b151cbac6b7ecf11723de56d4676daeba8713894b1e9af56174f2f9780ae7848ec3c6 + languageName: node + linkType: hard + +"typescript@npm:^5.9.3": + version: 5.9.3 + resolution: "typescript@npm:5.9.3" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 10c0/6bd7552ce39f97e711db5aa048f6f9995b53f1c52f7d8667c1abdc1700c68a76a308f579cd309ce6b53646deb4e9a1be7c813a93baaf0a28ccd536a30270e1c5 + 
languageName: node + linkType: hard + +"typescript@patch:typescript@npm%3A5.8.2#optional!builtin": + version: 5.8.2 + resolution: "typescript@patch:typescript@npm%3A5.8.2#optional!builtin::version=5.8.2&hash=5786d5" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 10c0/5448a08e595cc558ab321e49d4cac64fb43d1fa106584f6ff9a8d8e592111b373a995a1d5c7f3046211c8a37201eb6d0f1566f15cdb7a62a5e3be01d087848e2 + languageName: node + linkType: hard + +"typescript@patch:typescript@npm%3A^5.9.3#optional!builtin": + version: 5.9.3 + resolution: "typescript@patch:typescript@npm%3A5.9.3#optional!builtin::version=5.9.3&hash=5786d5" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 10c0/ad09fdf7a756814dce65bc60c1657b40d44451346858eea230e10f2e95a289d9183b6e32e5c11e95acc0ccc214b4f36289dcad4bf1886b0adb84d711d336a430 + languageName: node + linkType: hard + +"uc.micro@npm:^2.0.0, uc.micro@npm:^2.1.0": + version: 2.1.0 + resolution: "uc.micro@npm:2.1.0" + checksum: 10c0/8862eddb412dda76f15db8ad1c640ccc2f47cdf8252a4a30be908d535602c8d33f9855dfcccb8b8837855c1ce1eaa563f7fa7ebe3c98fd0794351aab9b9c55fa + languageName: node + linkType: hard + +"ufo@npm:^1.6.1": + version: 1.6.1 + resolution: "ufo@npm:1.6.1" + checksum: 10c0/5a9f041e5945fba7c189d5410508cbcbefef80b253ed29aa2e1f8a2b86f4bd51af44ee18d4485e6d3468c92be9bf4a42e3a2b72dcaf27ce39ce947ec994f1e6b + languageName: node + linkType: hard + +"unbox-primitive@npm:^1.1.0": + version: 1.1.0 + resolution: "unbox-primitive@npm:1.1.0" + dependencies: + call-bound: "npm:^1.0.3" + has-bigints: "npm:^1.0.2" + has-symbols: "npm:^1.1.0" + which-boxed-primitive: "npm:^1.1.1" + checksum: 10c0/7dbd35ab02b0e05fe07136c72cb9355091242455473ec15057c11430129bab38b7b3624019b8778d02a881c13de44d63cd02d122ee782fb519e1de7775b5b982 + languageName: node + linkType: hard + +"undici-types@npm:~7.13.0": + version: 7.13.0 + resolution: "undici-types@npm:7.13.0" + checksum: 
10c0/44bbb0935425291351bfd8039571f017295b5d6dc5727045d0a4fea8c6ffe73a6703b48ce010f9cb539b9041a75b463f8cfe1e7309cab7486452505fb0d66151 + languageName: node + linkType: hard + +"undici@npm:^5.25.4": + version: 5.29.0 + resolution: "undici@npm:5.29.0" + dependencies: + "@fastify/busboy": "npm:^2.0.0" + checksum: 10c0/e4e4d631ca54ee0ad82d2e90e7798fa00a106e27e6c880687e445cc2f13b4bc87c5eba2a88c266c3eecffb18f26e227b778412da74a23acc374fca7caccec49b + languageName: node + linkType: hard + +"undici@npm:^6.0.0": + version: 6.23.0 + resolution: "undici@npm:6.23.0" + checksum: 10c0/d846b3fdfd05aa6081ba1eab5db6bbc21b283042c7a43722b86d1ee2bf749d7c990ceac0c809f9a07ffd88b1b0f4c0f548a8362c035088cb1997d63abdda499c + languageName: node + linkType: hard + +"union@npm:~0.5.0": + version: 0.5.0 + resolution: "union@npm:0.5.0" + dependencies: + qs: "npm:^6.4.0" + checksum: 10c0/9ac158d99991063180e56f408f5991e808fa07594713439c098116da09215c154672ee8c832e16a6b39b037609c08bcaff8ff07c1e3e46c3cc622897972af2aa + languageName: node + linkType: hard + +"unique-filename@npm:^4.0.0": + version: 4.0.0 + resolution: "unique-filename@npm:4.0.0" + dependencies: + unique-slug: "npm:^5.0.0" + checksum: 10c0/38ae681cceb1408ea0587b6b01e29b00eee3c84baee1e41fd5c16b9ed443b80fba90c40e0ba69627e30855570a34ba8b06702d4a35035d4b5e198bf5a64c9ddc + languageName: node + linkType: hard + +"unique-slug@npm:^5.0.0": + version: 5.0.0 + resolution: "unique-slug@npm:5.0.0" + dependencies: + imurmurhash: "npm:^0.1.4" + checksum: 10c0/d324c5a44887bd7e105ce800fcf7533d43f29c48757ac410afd42975de82cc38ea2035c0483f4de82d186691bf3208ef35c644f73aa2b1b20b8e651be5afd293 + languageName: node + linkType: hard + +"universal-user-agent@npm:^6.0.0": + version: 6.0.1 + resolution: "universal-user-agent@npm:6.0.1" + checksum: 10c0/5c9c46ffe19a975e11e6443640ed4c9e0ce48fcc7203325757a8414ac49940ebb0f4667f2b1fa561489d1eb22cb2d05a0f7c82ec20c5cba42e58e188fb19b187 + languageName: node + linkType: hard + +"universalify@npm:^2.0.0": + version: 2.0.1 
+ resolution: "universalify@npm:2.0.1" + checksum: 10c0/73e8ee3809041ca8b818efb141801a1004e3fc0002727f1531f4de613ea281b494a40909596dae4a042a4fb6cd385af5d4db2e137b1362e0e91384b828effd3a + languageName: node + linkType: hard + +"uri-js@npm:^4.2.2, uri-js@npm:^4.4.1": + version: 4.4.1 + resolution: "uri-js@npm:4.4.1" + dependencies: + punycode: "npm:^2.1.0" + checksum: 10c0/4ef57b45aa820d7ac6496e9208559986c665e49447cb072744c13b66925a362d96dd5a46c4530a6b8e203e5db5fe849369444440cb22ecfc26c679359e5dfa3c + languageName: node + linkType: hard + +"url-join@npm:^4.0.1": + version: 4.0.1 + resolution: "url-join@npm:4.0.1" + checksum: 10c0/ac65e2c7c562d7b49b68edddcf55385d3e922bc1dd5d90419ea40b53b6de1607d1e45ceb71efb9d60da02c681d13c6cb3a1aa8b13fc0c989dfc219df97ee992d + languageName: node + linkType: hard + +"url-join@npm:^5.0.0": + version: 5.0.0 + resolution: "url-join@npm:5.0.0" + checksum: 10c0/ed2b166b4b5a98adcf6828a48b6bd6df1dac4c8a464a73cf4d8e2457ed410dd8da6be0d24855b86026cd7f5c5a3657c1b7b2c7a7c5b8870af17635a41387b04c + languageName: node + linkType: hard + +"v8-compile-cache-lib@npm:^3.0.1": + version: 3.0.1 + resolution: "v8-compile-cache-lib@npm:3.0.1" + checksum: 10c0/bdc36fb8095d3b41df197f5fb6f11e3a26adf4059df3213e3baa93810d8f0cc76f9a74aaefc18b73e91fe7e19154ed6f134eda6fded2e0f1c8d2272ed2d2d391 + languageName: node + linkType: hard + +"v8-to-istanbul@npm:^9.0.0": + version: 9.3.0 + resolution: "v8-to-istanbul@npm:9.3.0" + dependencies: + "@jridgewell/trace-mapping": "npm:^0.3.12" + "@types/istanbul-lib-coverage": "npm:^2.0.1" + convert-source-map: "npm:^2.0.0" + checksum: 10c0/968bcf1c7c88c04df1ffb463c179558a2ec17aa49e49376120504958239d9e9dad5281aa05f2a78542b8557f2be0b0b4c325710262f3b838b40d703d5ed30c23 + languageName: node + linkType: hard + +"validate-npm-package-name@npm:^5.0.1": + version: 5.0.1 + resolution: "validate-npm-package-name@npm:5.0.1" + checksum: 
10c0/903e738f7387404bb72f7ac34e45d7010c877abd2803dc2d614612527927a40a6d024420033132e667b1bade94544b8a1f65c9431a4eb30d0ce0d80093cd1f74 + languageName: node + linkType: hard + +"vite-plugin-dts@npm:^4.5.4": + version: 4.5.4 + resolution: "vite-plugin-dts@npm:4.5.4" + dependencies: + "@microsoft/api-extractor": "npm:^7.50.1" + "@rollup/pluginutils": "npm:^5.1.4" + "@volar/typescript": "npm:^2.4.11" + "@vue/language-core": "npm:2.2.0" + compare-versions: "npm:^6.1.1" + debug: "npm:^4.4.0" + kolorist: "npm:^1.8.0" + local-pkg: "npm:^1.0.0" + magic-string: "npm:^0.30.17" + peerDependencies: + typescript: "*" + vite: "*" + peerDependenciesMeta: + vite: + optional: true + checksum: 10c0/5fcb7f3739d115f36195a692c0e9f9fca4e504bbbbabe29e71ee06630dd05ea2920169371e80e548eb4779d2eca14107277497838d7df588d53e1fadf84be861 + languageName: node + linkType: hard + +"vite@npm:^7.1.9": + version: 7.1.9 + resolution: "vite@npm:7.1.9" + dependencies: + esbuild: "npm:^0.25.0" + fdir: "npm:^6.5.0" + fsevents: "npm:~2.3.3" + picomatch: "npm:^4.0.3" + postcss: "npm:^8.5.6" + rollup: "npm:^4.43.0" + tinyglobby: "npm:^0.2.15" + peerDependencies: + "@types/node": ^20.19.0 || >=22.12.0 + jiti: ">=1.21.0" + less: ^4.0.0 + lightningcss: ^1.21.0 + sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: ">=0.54.8" + sugarss: ^5.0.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + dependenciesMeta: + fsevents: + optional: true + peerDependenciesMeta: + "@types/node": + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + bin: + vite: bin/vite.js + checksum: 10c0/f628f903a137c1410232558bde99c223ea00a090bda6af77752c61f912955f0050aac12d3cfe024d08a0f150ff6fab61b3d0be75d634a59b94d49f525392e1f7 + languageName: node + linkType: hard + +"vscode-uri@npm:^3.0.8": + version: 3.1.0 + 
resolution: "vscode-uri@npm:3.1.0" + checksum: 10c0/5f6c9c10fd9b1664d71fab4e9fbbae6be93c7f75bb3a1d9d74399a88ab8649e99691223fd7cef4644376cac6e94fa2c086d802521b9a8e31c5af3e60f0f35624 + languageName: node + linkType: hard + +"whatwg-encoding@npm:^2.0.0": + version: 2.0.0 + resolution: "whatwg-encoding@npm:2.0.0" + dependencies: + iconv-lite: "npm:0.6.3" + checksum: 10c0/91b90a49f312dc751496fd23a7e68981e62f33afe938b97281ad766235c4872fc4e66319f925c5e9001502b3040dd25a33b02a9c693b73a4cbbfdc4ad10c3e3e + languageName: node + linkType: hard + +"which-boxed-primitive@npm:^1.1.0, which-boxed-primitive@npm:^1.1.1": + version: 1.1.1 + resolution: "which-boxed-primitive@npm:1.1.1" + dependencies: + is-bigint: "npm:^1.1.0" + is-boolean-object: "npm:^1.2.1" + is-number-object: "npm:^1.1.1" + is-string: "npm:^1.1.1" + is-symbol: "npm:^1.1.1" + checksum: 10c0/aceea8ede3b08dede7dce168f3883323f7c62272b49801716e8332ff750e7ae59a511ae088840bc6874f16c1b7fd296c05c949b0e5b357bfe3c431b98c417abe + languageName: node + linkType: hard + +"which-builtin-type@npm:^1.2.1": + version: 1.2.1 + resolution: "which-builtin-type@npm:1.2.1" + dependencies: + call-bound: "npm:^1.0.2" + function.prototype.name: "npm:^1.1.6" + has-tostringtag: "npm:^1.0.2" + is-async-function: "npm:^2.0.0" + is-date-object: "npm:^1.1.0" + is-finalizationregistry: "npm:^1.1.0" + is-generator-function: "npm:^1.0.10" + is-regex: "npm:^1.2.1" + is-weakref: "npm:^1.0.2" + isarray: "npm:^2.0.5" + which-boxed-primitive: "npm:^1.1.0" + which-collection: "npm:^1.0.2" + which-typed-array: "npm:^1.1.16" + checksum: 10c0/8dcf323c45e5c27887800df42fbe0431d0b66b1163849bb7d46b5a730ad6a96ee8bfe827d078303f825537844ebf20c02459de41239a0a9805e2fcb3cae0d471 + languageName: node + linkType: hard + +"which-collection@npm:^1.0.2": + version: 1.0.2 + resolution: "which-collection@npm:1.0.2" + dependencies: + is-map: "npm:^2.0.3" + is-set: "npm:^2.0.3" + is-weakmap: "npm:^2.0.2" + is-weakset: "npm:^2.0.3" + checksum: 
10c0/3345fde20964525a04cdf7c4a96821f85f0cc198f1b2ecb4576e08096746d129eb133571998fe121c77782ac8f21cbd67745a3d35ce100d26d4e684c142ea1f2 + languageName: node + linkType: hard + +"which-typed-array@npm:^1.1.16, which-typed-array@npm:^1.1.19": + version: 1.1.19 + resolution: "which-typed-array@npm:1.1.19" + dependencies: + available-typed-arrays: "npm:^1.0.7" + call-bind: "npm:^1.0.8" + call-bound: "npm:^1.0.4" + for-each: "npm:^0.3.5" + get-proto: "npm:^1.0.1" + gopd: "npm:^1.2.0" + has-tostringtag: "npm:^1.0.2" + checksum: 10c0/702b5dc878addafe6c6300c3d0af5983b175c75fcb4f2a72dfc3dd38d93cf9e89581e4b29c854b16ea37e50a7d7fca5ae42ece5c273d8060dcd603b2404bbb3f + languageName: node + linkType: hard + +"which@npm:^2.0.1": + version: 2.0.2 + resolution: "which@npm:2.0.2" + dependencies: + isexe: "npm:^2.0.0" + bin: + node-which: ./bin/node-which + checksum: 10c0/66522872a768b60c2a65a57e8ad184e5372f5b6a9ca6d5f033d4b0dc98aff63995655a7503b9c0a2598936f532120e81dd8cc155e2e92ed662a2b9377cc4374f + languageName: node + linkType: hard + +"which@npm:^5.0.0": + version: 5.0.0 + resolution: "which@npm:5.0.0" + dependencies: + isexe: "npm:^3.1.1" + bin: + node-which: bin/which.js + checksum: 10c0/e556e4cd8b7dbf5df52408c9a9dd5ac6518c8c5267c8953f5b0564073c66ed5bf9503b14d876d0e9c7844d4db9725fb0dcf45d6e911e17e26ab363dc3965ae7b + languageName: node + linkType: hard + +"word-wrap@npm:^1.2.5": + version: 1.2.5 + resolution: "word-wrap@npm:1.2.5" + checksum: 10c0/e0e4a1ca27599c92a6ca4c32260e8a92e8a44f4ef6ef93f803f8ed823f486e0889fc0b93be4db59c8d51b3064951d25e43d434e95dc8c960cc3a63d65d00ba20 + languageName: node + linkType: hard + +"workerpool@npm:^9.2.0": + version: 9.3.4 + resolution: "workerpool@npm:9.3.4" + checksum: 10c0/b09d80c81c6e50dab1bc6cc3a4180d4222068f17ada9b04fb7053bf98fdbe3dbd6bdd04ad1420363f5391cbf57d622ecd2680469ad0137aef990f510ab807a09 + languageName: node + linkType: hard + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0": + version: 7.0.0 + resolution: 
"wrap-ansi@npm:7.0.0" + dependencies: + ansi-styles: "npm:^4.0.0" + string-width: "npm:^4.1.0" + strip-ansi: "npm:^6.0.0" + checksum: 10c0/d15fc12c11e4cbc4044a552129ebc75ee3f57aa9c1958373a4db0292d72282f54373b536103987a4a7594db1ef6a4f10acf92978f79b98c49306a4b58c77d4da + languageName: node + linkType: hard + +"wrap-ansi@npm:^8.1.0": + version: 8.1.0 + resolution: "wrap-ansi@npm:8.1.0" + dependencies: + ansi-styles: "npm:^6.1.0" + string-width: "npm:^5.0.1" + strip-ansi: "npm:^7.0.1" + checksum: 10c0/138ff58a41d2f877eae87e3282c0630fc2789012fc1af4d6bd626eeb9a2f9a65ca92005e6e69a75c7b85a68479fe7443c7dbe1eb8fbaa681a4491364b7c55c60 + languageName: node + linkType: hard + +"wrappy@npm:1": + version: 1.0.2 + resolution: "wrappy@npm:1.0.2" + checksum: 10c0/56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 + languageName: node + linkType: hard + +"y18n@npm:^5.0.5": + version: 5.0.8 + resolution: "y18n@npm:5.0.8" + checksum: 10c0/4df2842c36e468590c3691c894bc9cdbac41f520566e76e24f59401ba7d8b4811eb1e34524d57e54bc6d864bcb66baab7ffd9ca42bf1eda596618f9162b91249 + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a + languageName: node + linkType: hard + +"yallist@npm:^5.0.0": + version: 5.0.0 + resolution: "yallist@npm:5.0.0" + checksum: 10c0/a499c81ce6d4a1d260d4ea0f6d49ab4da09681e32c3f0472dee16667ed69d01dae63a3b81745a24bd78476ec4fcf856114cb4896ace738e01da34b2c42235416 + languageName: node + linkType: hard + +"yaml@npm:^2.8.1": + version: 2.8.1 + resolution: "yaml@npm:2.8.1" + bin: + yaml: bin.mjs + checksum: 10c0/7c587be00d9303d2ae1566e03bc5bc7fe978ba0d9bf39cc418c3139d37929dfcb93a230d9749f2cb578b6aa5d9ebebc322415e4b653cb83acd8bc0bc321707f3 + languageName: node + linkType: hard + 
+"yargs-parser@npm:^21.1.1": + version: 21.1.1 + resolution: "yargs-parser@npm:21.1.1" + checksum: 10c0/f84b5e48169479d2f402239c59f084cfd1c3acc197a05c59b98bab067452e6b3ea46d4dd8ba2985ba7b3d32a343d77df0debd6b343e5dae3da2aab2cdf5886b2 + languageName: node + linkType: hard + +"yargs-unparser@npm:^2.0.0": + version: 2.0.0 + resolution: "yargs-unparser@npm:2.0.0" + dependencies: + camelcase: "npm:^6.0.0" + decamelize: "npm:^4.0.0" + flat: "npm:^5.0.2" + is-plain-obj: "npm:^2.1.0" + checksum: 10c0/a5a7d6dc157efa95122e16780c019f40ed91d4af6d2bac066db8194ed0ec5c330abb115daa5a79ff07a9b80b8ea80c925baacf354c4c12edd878c0529927ff03 + languageName: node + linkType: hard + +"yargs@npm:17.7.2, yargs@npm:^17.7.2": + version: 17.7.2 + resolution: "yargs@npm:17.7.2" + dependencies: + cliui: "npm:^8.0.1" + escalade: "npm:^3.1.1" + get-caller-file: "npm:^2.0.5" + require-directory: "npm:^2.1.1" + string-width: "npm:^4.2.3" + y18n: "npm:^5.0.5" + yargs-parser: "npm:^21.1.1" + checksum: 10c0/ccd7e723e61ad5965fffbb791366db689572b80cca80e0f96aad968dfff4156cd7cd1ad18607afe1046d8241e6fb2d6c08bf7fa7bfb5eaec818735d8feac8f05 + languageName: node + linkType: hard + +"yn@npm:3.1.1": + version: 3.1.1 + resolution: "yn@npm:3.1.1" + checksum: 10c0/0732468dd7622ed8a274f640f191f3eaf1f39d5349a1b72836df484998d7d9807fbea094e2f5486d6b0cd2414aad5775972df0e68f8604db89a239f0f4bf7443 + languageName: node + linkType: hard + +"yocto-queue@npm:^0.1.0": + version: 0.1.0 + resolution: "yocto-queue@npm:0.1.0" + checksum: 10c0/dceb44c28578b31641e13695d200d34ec4ab3966a5729814d5445b194933c096b7ced71494ce53a0e8820685d1d010df8b2422e5bf2cdea7e469d97ffbea306f + languageName: node + linkType: hard + +"zod-package-json@npm:^1.0.3": + version: 1.2.0 + resolution: "zod-package-json@npm:1.2.0" + dependencies: + zod: "npm:^3.25.64" + checksum: 10c0/78a2104231c8c60b00adc8cf9acfae0c0c5cfc77f0be9225297a41bd73f52b21d17b1208358e0dfe7bd558fb4ba1cc6743c2bdd111280831be00f0c3ecdfc1f4 + languageName: node + linkType: hard + 
+"zod@npm:^3.23.8, zod@npm:^3.25.64": + version: 3.25.76 + resolution: "zod@npm:3.25.76" + checksum: 10c0/5718ec35e3c40b600316c5b4c5e4976f7fee68151bc8f8d90ec18a469be9571f072e1bbaace10f1e85cf8892ea12d90821b200e980ab46916a6166a4260a983c + languageName: node + linkType: hard