From 7fbd804d9c500fcfd37556ae5f94cdc4dd36163e Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 2 Apr 2026 20:37:07 +0000 Subject: [PATCH 1/5] feat: add remote imports (public-schemas, plugin-schema) with cache and security - Add public-schemas/ directory with platform schemas (vercel, netlify, cloudflare) - Implement public-schemas: import protocol with GitHub-based fetch and cache - Implement plugin-schema: import protocol for installed plugin packages - Add security restrictions for remote imports (no @plugin, no local file access) - Add `varlock cache clear` CLI command for clearing schemas and plugins cache - Add tests for remote imports and cache clearing Agent-Logs-Url: https://github.com/dmno-dev/varlock/sessions/e612b56c-9a06-4feb-867c-370c7a82ea6f Co-authored-by: theoephraim <1158956+theoephraim@users.noreply.github.com> --- packages/varlock/src/cli/cli-executable.ts | 2 + .../varlock/src/cli/commands/cache.command.ts | 60 ++++++ .../varlock/src/env-graph/lib/data-source.ts | 75 +++++++- .../src/env-graph/lib/plugin-schema.ts | 92 +++++++++ .../src/env-graph/test/remote-import.test.ts | 116 ++++++++++++ packages/varlock/src/lib/schema-cache.ts | 175 ++++++++++++++++++ .../varlock/src/lib/test/schema-cache.test.ts | 67 +++++++ public-schemas/README.md | 32 ++++ .../platforms/.env.cloudflare-pages | 27 +++ .../platforms/.env.cloudflare-wrangler | 49 +++++ public-schemas/platforms/.env.netlify | 105 +++++++++++ public-schemas/platforms/.env.vercel | 130 +++++++++++++ 12 files changed, 929 insertions(+), 1 deletion(-) create mode 100644 packages/varlock/src/cli/commands/cache.command.ts create mode 100644 packages/varlock/src/env-graph/lib/plugin-schema.ts create mode 100644 packages/varlock/src/env-graph/test/remote-import.test.ts create mode 100644 packages/varlock/src/lib/schema-cache.ts create mode 100644 packages/varlock/src/lib/test/schema-cache.test.ts create mode 100644 
public-schemas/README.md create mode 100644 public-schemas/platforms/.env.cloudflare-pages create mode 100644 public-schemas/platforms/.env.cloudflare-wrangler create mode 100644 public-schemas/platforms/.env.netlify create mode 100644 public-schemas/platforms/.env.vercel diff --git a/packages/varlock/src/cli/cli-executable.ts b/packages/varlock/src/cli/cli-executable.ts index 9e53bc73..05c30ef7 100644 --- a/packages/varlock/src/cli/cli-executable.ts +++ b/packages/varlock/src/cli/cli-executable.ts @@ -20,6 +20,7 @@ import { commandSpec as helpCommandSpec } from './commands/help.command'; import { commandSpec as telemetryCommandSpec } from './commands/telemetry.command'; import { commandSpec as scanCommandSpec } from './commands/scan.command'; import { commandSpec as typegenCommandSpec } from './commands/typegen.command'; +import { commandSpec as cacheCommandSpec } from './commands/cache.command'; // import { commandSpec as loginCommandSpec } from './commands/login.command'; // import { commandSpec as pluginCommandSpec } from './commands/plugin.command'; @@ -55,6 +56,7 @@ subCommands.set('help', buildLazyCommand(helpCommandSpec, async () => await impo subCommands.set('telemetry', buildLazyCommand(telemetryCommandSpec, async () => await import('./commands/telemetry.command'))); subCommands.set('scan', buildLazyCommand(scanCommandSpec, async () => await import('./commands/scan.command'))); subCommands.set('typegen', buildLazyCommand(typegenCommandSpec, async () => await import('./commands/typegen.command'))); +subCommands.set('cache', buildLazyCommand(cacheCommandSpec, async () => await import('./commands/cache.command'))); // subCommands.set('login', buildLazyCommand(loginCommandSpec, async () => await import('./commands/login.command'))); // subCommands.set('plugin', buildLazyCommand(pluginCommandSpec, async () => await import('./commands/plugin.command'))); diff --git a/packages/varlock/src/cli/commands/cache.command.ts 
b/packages/varlock/src/cli/commands/cache.command.ts new file mode 100644 index 00000000..7b0f84c0 --- /dev/null +++ b/packages/varlock/src/cli/commands/cache.command.ts @@ -0,0 +1,60 @@ +import { define } from 'gunshi'; +import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; +import { CliExitError } from '../helpers/exit-error'; +import { clearSchemasCache, clearPluginsCache, clearAllCaches } from '../../lib/schema-cache'; + +export const commandSpec = define({ + name: 'cache', + description: 'Manage cached schemas and plugins', + args: { + action: { + type: 'positional', + description: '"clear" to clear all caches', + }, + target: { + type: 'positional', + description: '"schemas", "plugins", or "all" (default: all)', + }, + }, + examples: ` +Manage cached data for remote schemas and downloaded plugins. + +Examples: + varlock cache clear # Clear all caches (schemas + plugins) + varlock cache clear schemas # Clear only the schemas cache + varlock cache clear plugins # Clear only the plugins cache + `.trim(), +}); + +export const commandFn: TypedGunshiCommandFn = async (ctx) => { + const { action, target } = ctx.values; + + if (action !== 'clear') { + throw new CliExitError('First argument must be "clear"', { + forceExit: true, + }); + } + + const cacheTarget = target || 'all'; + if (!['schemas', 'plugins', 'all'].includes(cacheTarget)) { + throw new CliExitError('Cache target must be "schemas", "plugins", or "all"', { + forceExit: true, + }); + } + + try { + if (cacheTarget === 'schemas') { + await clearSchemasCache(); + console.log('✅ Schemas cache cleared'); + } else if (cacheTarget === 'plugins') { + await clearPluginsCache(); + console.log('✅ Plugins cache cleared'); + } else { + await clearAllCaches(); + console.log('✅ All caches cleared (schemas + plugins)'); + } + } catch (error) { + console.error('Failed to clear cache:', error); + throw new CliExitError('Failed to clear cache', { forceExit: true }); + } +}; diff --git 
a/packages/varlock/src/env-graph/lib/data-source.ts b/packages/varlock/src/env-graph/lib/data-source.ts index efe7c705..cd8471e2 100644 --- a/packages/varlock/src/env-graph/lib/data-source.ts +++ b/packages/varlock/src/env-graph/lib/data-source.ts @@ -16,6 +16,8 @@ import { pathExists } from '@env-spec/utils/fs-utils'; import { processPluginInstallDecorators } from './plugins'; import { RootDecoratorInstance } from './decorators'; import { isBuiltinVar } from './builtin-vars'; +import { fetchPublicSchema } from '../../lib/schema-cache'; +import { resolvePluginSchema } from './plugin-schema'; const DATA_SOURCE_TYPES = Object.freeze({ schema: { @@ -56,10 +58,16 @@ export abstract class EnvGraphDataSource { importKeys?: Array, /** true when the @import had a non-static `enabled` parameter (e.g. `enabled=forEnv("dev")`) */ isConditionallyEnabled?: boolean, + /** true when the source was imported from a remote protocol (public-schemas:, plugin-schema:) */ + isRemoteImport?: boolean, }; get isImport(): boolean { return !!this.importMeta?.isImport || !!this.parent?.isImport; } + /** true if this data source (or any ancestor) was imported from a remote protocol */ + get isRemoteImport(): boolean { + return !!this.importMeta?.isRemoteImport || !!this.parent?.isRemoteImport; + } get isPartialImport() { return (this.importKeys || []).length > 0; } @@ -302,7 +310,16 @@ export abstract class EnvGraphDataSource { const defaultRequiredDec = this.getRootDec('defaultRequired'); await defaultRequiredDec?.process(); - await processPluginInstallDecorators(this); + // Security: remotely imported files cannot install plugins + if (this.isRemoteImport) { + const pluginDecs = this.getRootDecFns('plugin'); + if (pluginDecs.length) { + this._loadingError = new Error('Remotely imported schemas cannot install plugins (@plugin is not allowed)'); + return; + } + } else { + await processPluginInstallDecorators(this); + } } /** @@ -347,15 +364,27 @@ export abstract class EnvGraphDataSource { // 
determine the full import path based on path type let fullImportPath: string | undefined; if (importPath.startsWith('./') || importPath.startsWith('../')) { + // Security: remote imports cannot access local files + if (this.isRemoteImport) { + throw new Error('Remotely imported schemas cannot use local file imports'); + } // eslint-disable-next-line no-use-before-define if (!(this instanceof FileBasedDataSource)) { throw new Error('@import of files can only be used from a file-based data source'); } fullImportPath = path.resolve(this.fullPath, '..', importPath); } else if (importPath.startsWith('~/') || importPath === '~') { + // Security: remote imports cannot access local files + if (this.isRemoteImport) { + throw new Error('Remotely imported schemas cannot use local file imports'); + } // expand ~ to home directory (treat like absolute path) fullImportPath = path.join(os.homedir(), importPath.slice(1)); } else if (importPath.startsWith('/')) { + // Security: remote imports cannot access local files + if (this.isRemoteImport) { + throw new Error('Remotely imported schemas cannot use local file imports'); + } // absolute path fullImportPath = importPath; } @@ -447,6 +476,50 @@ export abstract class EnvGraphDataSource { }); } } + } else if (importPath.startsWith('public-schemas:')) { + // Remote import from official varlock public schemas + const schemaPath = importPath.slice('public-schemas:'.length); + if (!schemaPath || schemaPath.includes('..')) { + this._loadingError = new Error(`Invalid public schema path: ${schemaPath}`); + return; + } + try { + const contents = await fetchPublicSchema(schemaPath); + // Use a synthetic path for the data source + const syntheticPath = `.env.public-schema-${schemaPath.replace(/\//g, '-')}`; + // eslint-disable-next-line no-use-before-define + const source = new DotEnvFileDataSource(syntheticPath, { overrideContents: contents }); + await this.addChild(source, { + isImport: true, importKeys, isConditionallyEnabled, 
isRemoteImport: true, + }); + } catch (fetchErr) { + if (allowMissing) continue; + this._loadingError = new Error(`Failed to fetch public schema "${schemaPath}": ${(fetchErr as Error).message}`); + return; + } + } else if (importPath.startsWith('plugin-schema:')) { + // Import schema from an installed plugin package + const pluginName = importPath.slice('plugin-schema:'.length); + if (!pluginName) { + this._loadingError = new Error('plugin-schema: import must specify a plugin name'); + return; + } + try { + // eslint-disable-next-line no-use-before-define + const schemaSource = await resolvePluginSchema(pluginName, this instanceof FileBasedDataSource ? this : undefined); + if (!schemaSource) { + if (allowMissing) continue; + this._loadingError = new Error(`Plugin "${pluginName}" does not expose a schema file`); + return; + } + await this.addChild(schemaSource, { + isImport: true, importKeys, isConditionallyEnabled, isRemoteImport: true, + }); + } catch (pluginErr) { + if (allowMissing) continue; + this._loadingError = new Error(`Failed to resolve plugin schema "${pluginName}": ${(pluginErr as Error).message}`); + return; + } } else if (importPath.startsWith('http://') || importPath.startsWith('https://')) { this._loadingError = new Error('http imports not supported yet'); return; diff --git a/packages/varlock/src/env-graph/lib/plugin-schema.ts b/packages/varlock/src/env-graph/lib/plugin-schema.ts new file mode 100644 index 00000000..1f26179e --- /dev/null +++ b/packages/varlock/src/env-graph/lib/plugin-schema.ts @@ -0,0 +1,92 @@ +import path from 'node:path'; +import fs from 'node:fs/promises'; +import { pathExists } from '@env-spec/utils/fs-utils'; +import { getWorkspaceInfo } from '../../lib/workspace-utils'; + +// Lazy import to avoid circular dependency +import type { FileBasedDataSource, EnvGraphDataSource } from './data-source'; + +/** + * Resolves a schema file (.env.schema) from an installed plugin package. 
+ * + * Looks for a schema file exported via the "env-schema" field in the plugin's package.json, + * falling back to looking for a `.env.schema` file in the package root. + * + * @param pluginName - The npm package name (e.g., "@varlock/1password-plugin") + * @param fileDataSource - The data source from which the import was triggered (for resolving node_modules) + * @returns A DotEnvFileDataSource for the schema, or undefined if no schema found + */ +export async function resolvePluginSchema( + pluginName: string, + fileDataSource?: FileBasedDataSource, +): Promise { + // Lazy import to avoid circular dependency at module level + const { DotEnvFileDataSource } = await import('./data-source'); + + const workspaceRootPath = getWorkspaceInfo()?.rootPath; + + // Start from the file data source's directory, or cwd + let startDir: string; + if (fileDataSource) { + startDir = path.dirname(fileDataSource.fullPath); + } else { + startDir = process.cwd(); + } + + // Walk up the directory tree looking for the plugin in node_modules + let currentDir = startDir; + while (currentDir) { + const candidatePath = path.join(currentDir, 'node_modules', pluginName); + if (await pathExists(candidatePath)) { + // Found the plugin package - look for its schema + const schemaPath = await findPluginSchemaFile(candidatePath); + if (schemaPath) { + return new DotEnvFileDataSource(schemaPath); + } + return undefined; + } + + // Stop at the workspace root + if (workspaceRootPath && currentDir === workspaceRootPath) break; + + const parentDir = path.dirname(currentDir); + if (parentDir === currentDir) break; + currentDir = parentDir; + } + + throw new Error(`Plugin package "${pluginName}" not found in node_modules`); +} + +/** + * Finds the schema file in a plugin package. + * + * Checks: + * 1. "env-schema" field in package.json (points to a file relative to package root) + * 2. 
.env.schema file in the package root + */ +async function findPluginSchemaFile(packageDir: string): Promise { + const pkgJsonPath = path.join(packageDir, 'package.json'); + + try { + const pkgJsonContent = await fs.readFile(pkgJsonPath, 'utf-8'); + const pkgJson = JSON.parse(pkgJsonContent); + + // Check for explicit "env-schema" field + if (pkgJson['env-schema']) { + const schemaFilePath = path.resolve(packageDir, pkgJson['env-schema']); + if (await pathExists(schemaFilePath)) { + return schemaFilePath; + } + } + } catch { + // No package.json or couldn't read it + } + + // Fallback: look for .env.schema in the package root + const defaultSchemaPath = path.join(packageDir, '.env.schema'); + if (await pathExists(defaultSchemaPath)) { + return defaultSchemaPath; + } + + return undefined; +} diff --git a/packages/varlock/src/env-graph/test/remote-import.test.ts b/packages/varlock/src/env-graph/test/remote-import.test.ts new file mode 100644 index 00000000..f8f53994 --- /dev/null +++ b/packages/varlock/src/env-graph/test/remote-import.test.ts @@ -0,0 +1,116 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import outdent from 'outdent'; +import path from 'node:path'; +import { + EnvGraph, DirectoryDataSource, DotEnvFileDataSource, +} from '../../index'; + +// Helper similar to envFilesTest but for testing graph loading +async function loadGraph(spec: { + files: Record; + fallbackEnv?: string; +}) { + const currentDir = path.dirname(expect.getState().testPath!); + vi.spyOn(process, 'cwd').mockReturnValue(currentDir); + + const g = new EnvGraph(); + if (spec.fallbackEnv) g.envFlagFallback = spec.fallbackEnv; + g.setVirtualImports(currentDir, spec.files); + const source = new DirectoryDataSource(currentDir); + await g.setRootDataSource(source); + await g.finishLoad(); + return g; +} + +describe('remote imports - security', () => { + beforeEach(() => { + vi.restoreAllMocks(); + }); + afterEach(() => { + vi.restoreAllMocks(); + }); + + 
test('public-schemas: import with invalid path (contains ..) should error', async () => { + // Mock the fetch to avoid actual network calls + const mockFetch = vi.fn().mockRejectedValue(new Error('should not be called')); + vi.doMock('../../lib/schema-cache', () => ({ + fetchPublicSchema: mockFetch, + })); + + const g = await loadGraph({ + files: { + '.env.schema': outdent` + # @import(public-schemas:../../../etc/passwd) + # --- + ITEM1=value + `, + }, + }); + + // Should have a loading error due to the invalid path + expect( + g.sortedDataSources.some((s) => s.loadingError), + 'Expected a loading error for invalid path', + ).toBeTruthy(); + }); + + test('http imports should still show not supported error', async () => { + const g = await loadGraph({ + files: { + '.env.schema': outdent` + # @import(https://example.com/.env.schema) + # --- + ITEM1=value + `, + }, + }); + + const errorSource = g.sortedDataSources.find((s) => s.loadingError); + expect(errorSource?.loadingError?.message).toContain('http imports not supported yet'); + }); + + test('npm imports should still show not supported error', async () => { + const g = await loadGraph({ + files: { + '.env.schema': outdent` + # @import(npm:some-package@1.0.0/.env) + # --- + ITEM1=value + `, + }, + }); + + const errorSource = g.sortedDataSources.find((s) => s.loadingError); + expect(errorSource?.loadingError?.message).toContain('npm imports not supported yet'); + }); + + test('unsupported import protocol should error', async () => { + const g = await loadGraph({ + files: { + '.env.schema': outdent` + # @import(ftp://example.com/.env.schema) + # --- + ITEM1=value + `, + }, + }); + + const errorSource = g.sortedDataSources.find((s) => s.loadingError); + expect(errorSource?.loadingError?.message).toContain('unsupported import type'); + }); + + test('plugin-schema: import with empty plugin name should error', async () => { + const g = await loadGraph({ + files: { + '.env.schema': outdent` + # @import(plugin-schema:) + # 
--- + ITEM1=value + `, + }, + }); + + const errorSource = g.sortedDataSources.find((s) => s.loadingError); + expect(errorSource?.loadingError?.message).toContain('must specify a plugin name'); + }); +}); diff --git a/packages/varlock/src/lib/schema-cache.ts b/packages/varlock/src/lib/schema-cache.ts new file mode 100644 index 00000000..e09f05f2 --- /dev/null +++ b/packages/varlock/src/lib/schema-cache.ts @@ -0,0 +1,175 @@ +import path from 'node:path'; +import fs from 'node:fs/promises'; +import https from 'node:https'; +import crypto from 'node:crypto'; +import { getUserVarlockDir } from './user-config-dir'; + +// GitHub raw content base URL for the varlock repo public-schemas +const PUBLIC_SCHEMAS_GITHUB_BASE = 'https://raw.githubusercontent.com/dmno-dev/varlock/main/public-schemas'; + +// Default TTL for cached schemas (24 hours in milliseconds) +const DEFAULT_CACHE_TTL_MS = 24 * 60 * 60 * 1000; + +export interface SchemaCacheEntry { + /** path within the public-schemas directory (e.g., "platforms/vercel") */ + schemaPath: string; + /** local file name in the cache folder */ + localFileName: string; + /** timestamp when the schema was cached */ + cachedAt: number; + /** hash of the content for integrity checks */ + contentHash: string; +} + +export interface SchemaCacheIndex { + entries: Record; +} + +function getSchemasCacheDir() { + return path.join(getUserVarlockDir(), 'schemas-cache'); +} + +function getSchemasCacheIndexPath() { + return path.join(getSchemasCacheDir(), 'index.json'); +} + +async function loadCacheIndex(): Promise { + try { + const raw = await fs.readFile(getSchemasCacheIndexPath(), 'utf-8'); + return JSON.parse(raw); + } catch { + return { entries: {} }; + } +} + +async function saveCacheIndex(index: SchemaCacheIndex) { + const cacheDir = getSchemasCacheDir(); + await fs.mkdir(cacheDir, { recursive: true }); + await fs.writeFile(getSchemasCacheIndexPath(), JSON.stringify(index, null, 2)); +} + +function fetchUrl(url: string): Promise
{ + return new Promise((resolve, reject) => { + https.get(url, (res) => { + if (res.statusCode === 301 || res.statusCode === 302) { + const redirectUrl = res.headers.location; + if (redirectUrl) { + fetchUrl(redirectUrl).then(resolve, reject); + return; + } + } + if (res.statusCode !== 200) { + reject(new Error(`Failed to fetch schema: HTTP ${res.statusCode} from ${url}`)); + return; + } + let data = ''; + res.on('data', (chunk) => { data += chunk; }); + res.on('end', () => resolve(data)); + res.on('error', reject); + }).on('error', reject); + }); +} + +function hashContent(content: string): string { + return crypto.createHash('sha256').update(content).digest('hex').slice(0, 16); +} + +/** + * Fetches a public schema file, using cache when available and fresh. + * + * @param schemaPath - Path within public-schemas (e.g., "platforms/vercel") + * @returns The schema file contents + */ +export async function fetchPublicSchema(schemaPath: string): Promise { + const cacheDir = getSchemasCacheDir(); + await fs.mkdir(cacheDir, { recursive: true }); + + const index = await loadCacheIndex(); + const cached = index.entries[schemaPath]; + + // Check if we have a fresh cached version + if (cached) { + const age = Date.now() - cached.cachedAt; + if (age < DEFAULT_CACHE_TTL_MS) { + const cachedFilePath = path.join(cacheDir, cached.localFileName); + try { + return await fs.readFile(cachedFilePath, 'utf-8'); + } catch { + // Cache file missing, fall through to re-fetch + } + } + } + + // Fetch from GitHub + const url = `${PUBLIC_SCHEMAS_GITHUB_BASE}/${schemaPath.replace(/^\//, '')}`; // NOTE(review): unused — superseded by fileUrl below; remove + // The actual file on disk is named .env.<name>, so we need to construct the URL properly + // schemaPath is like "platforms/vercel" -> fetch "platforms/.env.vercel" + const parts = schemaPath.split('/'); + const name = parts.pop()!; + const dir = parts.join('/'); + const fileUrl = `${PUBLIC_SCHEMAS_GITHUB_BASE}/${dir}/.env.${name}`; + + const content = await fetchUrl(fileUrl); + + // Save to cache + 
const contentHash = hashContent(content); + const localFileName = `${schemaPath.replace(/\//g, '_')}_${contentHash}.env`; + const localFilePath = path.join(cacheDir, localFileName); + + await fs.writeFile(localFilePath, content); + + // Clean up old cached file if it exists + if (cached && cached.localFileName !== localFileName) { + try { + await fs.rm(path.join(cacheDir, cached.localFileName), { force: true }); + } catch { + // ignore cleanup errors + } + } + + // Update index + index.entries[schemaPath] = { + schemaPath, + localFileName, + cachedAt: Date.now(), + contentHash, + }; + await saveCacheIndex(index); + + return content; +} + + +/** + * Clears the schemas cache directory + */ +export async function clearSchemasCache() { + const cacheDir = getSchemasCacheDir(); + try { + await fs.rm(cacheDir, { recursive: true, force: true }); + } catch { + // ignore if doesn't exist + } +} + +/** + * Clears the plugins cache directory + */ +export async function clearPluginsCache() { + const pluginsCacheDir = path.join(getUserVarlockDir(), 'plugins-cache'); + try { + await fs.rm(pluginsCacheDir, { recursive: true, force: true }); + } catch { + // ignore if doesn't exist + } +} + +/** + * Clears all caches (schemas + plugins) + */ +export async function clearAllCaches() { + await Promise.all([ + clearSchemasCache(), + clearPluginsCache(), + ]); +} diff --git a/packages/varlock/src/lib/test/schema-cache.test.ts b/packages/varlock/src/lib/test/schema-cache.test.ts new file mode 100644 index 00000000..59a7689e --- /dev/null +++ b/packages/varlock/src/lib/test/schema-cache.test.ts @@ -0,0 +1,67 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import fs from 'node:fs/promises'; +import path from 'node:path'; +import os from 'node:os'; +import { clearSchemasCache, clearPluginsCache, clearAllCaches } from '../../lib/schema-cache'; + +describe('schema-cache', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await 
fs.mkdtemp(path.join(os.tmpdir(), 'varlock-cache-test-')); + vi.stubEnv('XDG_CONFIG_HOME', tmpDir); + }); + + afterEach(async () => { + vi.unstubAllEnvs(); + await fs.rm(tmpDir, { recursive: true, force: true }); + }); + + describe('clearSchemasCache', () => { + test('clears schemas cache directory', async () => { + const schemasDir = path.join(tmpDir, 'varlock', 'schemas-cache'); + await fs.mkdir(schemasDir, { recursive: true }); + await fs.writeFile(path.join(schemasDir, 'test.env'), 'TEST=1'); + + await clearSchemasCache(); + + const exists = await fs.stat(schemasDir).then(() => true, () => false); + expect(exists).toBe(false); + }); + + test('does not error when directory does not exist', async () => { + await expect(clearSchemasCache()).resolves.not.toThrow(); + }); + }); + + describe('clearPluginsCache', () => { + test('clears plugins cache directory', async () => { + const pluginsDir = path.join(tmpDir, 'varlock', 'plugins-cache'); + await fs.mkdir(pluginsDir, { recursive: true }); + await fs.writeFile(path.join(pluginsDir, 'test.tgz'), 'dummy'); + + await clearPluginsCache(); + + const exists = await fs.stat(pluginsDir).then(() => true, () => false); + expect(exists).toBe(false); + }); + }); + + describe('clearAllCaches', () => { + test('clears both schemas and plugins caches', async () => { + const schemasDir = path.join(tmpDir, 'varlock', 'schemas-cache'); + const pluginsDir = path.join(tmpDir, 'varlock', 'plugins-cache'); + await fs.mkdir(schemasDir, { recursive: true }); + await fs.mkdir(pluginsDir, { recursive: true }); + await fs.writeFile(path.join(schemasDir, 'test.env'), 'TEST=1'); + await fs.writeFile(path.join(pluginsDir, 'test.tgz'), 'dummy'); + + await clearAllCaches(); + + const schemasExists = await fs.stat(schemasDir).then(() => true, () => false); + const pluginsExists = await fs.stat(pluginsDir).then(() => true, () => false); + expect(schemasExists).toBe(false); + expect(pluginsExists).toBe(false); + }); + }); +}); diff --git 
a/public-schemas/README.md b/public-schemas/README.md new file mode 100644 index 00000000..7d0eaa84 --- /dev/null +++ b/public-schemas/README.md @@ -0,0 +1,32 @@ +# Public Schemas + +This directory contains pre-built `.env` schema files that can be imported into any project using the `public-schemas:` import protocol. + +## Usage + +Import a public schema in your `.env.schema` file: + +```env-spec +# @import(public-schemas:platforms/vercel) +``` + +This will fetch and cache the schema from the varlock repository on GitHub. + +## Available Schemas + +### Platforms + +- **`platforms/vercel`** - Vercel system environment variables +- **`platforms/netlify`** - Netlify build environment variables +- **`platforms/cloudflare-pages`** - Cloudflare Pages build environment variables +- **`platforms/cloudflare-wrangler`** - Cloudflare Wrangler system environment variables + +## Contributing + +To add a new public schema: + +1. Create a `.env.<name>` file in the appropriate subdirectory +2. Follow the existing format using `@env-spec` decorators +3. Include documentation links with `@docs()` decorators +4. Mark all items as `@optional` (since they're platform-injected) +5. 
Mark sensitive items with `@sensitive` diff --git a/public-schemas/platforms/.env.cloudflare-pages b/public-schemas/platforms/.env.cloudflare-pages new file mode 100644 index 00000000..1b188abd --- /dev/null +++ b/public-schemas/platforms/.env.cloudflare-pages @@ -0,0 +1,27 @@ +# Cloudflare Pages Build Environment Variables +# @see https://developers.cloudflare.com/pages/configuration/build-configuration/#environment-variables +# --- + +# Flag to detect the build is running on Cloudflare Pages rather than locally +# @type=boolean +# @optional +# @docs(Cloudflare Pages env vars, https://developers.cloudflare.com/pages/configuration/build-configuration/#environment-variables) +CF_PAGES= + +# Branch name of the current Cloudflare Pages deployment +# @type=string +# @optional +# @docs(Cloudflare Pages env vars, https://developers.cloudflare.com/pages/configuration/build-configuration/#environment-variables) +CF_PAGES_BRANCH= + +# SHA hash of the current Cloudflare Pages commit +# @type=string +# @optional +# @docs(Cloudflare Pages env vars, https://developers.cloudflare.com/pages/configuration/build-configuration/#environment-variables) +CF_PAGES_COMMIT_SHA= + +# URL of the current Cloudflare Pages deployment +# @type=url +# @optional +# @docs(Cloudflare Pages env vars, https://developers.cloudflare.com/pages/configuration/build-configuration/#environment-variables) +CF_PAGES_URL= diff --git a/public-schemas/platforms/.env.cloudflare-wrangler b/public-schemas/platforms/.env.cloudflare-wrangler new file mode 100644 index 00000000..26f62a80 --- /dev/null +++ b/public-schemas/platforms/.env.cloudflare-wrangler @@ -0,0 +1,49 @@ +# Cloudflare Wrangler System Environment Variables +# @see https://developers.cloudflare.com/workers/wrangler/system-environment-variables/ +# --- + +# The account ID for the Workers related account +# @type=string +# @optional +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) 
+CLOUDFLARE_ACCOUNT_ID= + +# The API token for your Cloudflare account, used for authentication in CI/CD and automation +# @type=string +# @sensitive +# @optional +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) +CLOUDFLARE_API_TOKEN= + +# The API key for your Cloudflare account, usually used for older authentication method with CLOUDFLARE_EMAIL +# @type=string +# @sensitive +# @optional +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) +CLOUDFLARE_API_KEY= + +# The email address associated with your Cloudflare account, usually used with CLOUDFLARE_API_KEY +# @type=email +# @optional +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) +CLOUDFLARE_EMAIL= + +# Whether to send anonymous usage metrics to Cloudflare +# @type=boolean +# @optional +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) +CLOUDFLARE_SEND_METRICS= + +# The base URL for the Cloudflare API +# @type=url +# @optional +# @example=https://api.cloudflare.com/client/v4 +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) +CLOUDFLARE_API_BASE_URL= + +# Controls what level of logs should be shown from Wrangler +# If an invalid level is specified, Wrangler will fallback to the default (log) +# @type=enum(none, error, warn, info, log, debug) +# @optional +# @docs(Cloudflare Wrangler env vars, https://developers.cloudflare.com/workers/wrangler/system-environment-variables/) +WRANGLER_LOG= diff --git a/public-schemas/platforms/.env.netlify b/public-schemas/platforms/.env.netlify new file mode 100644 index 00000000..4bec606e --- /dev/null +++ b/public-schemas/platforms/.env.netlify @@ -0,0 +1,105 @@ +# Netlify Build Environment Variables +# @see 
https://docs.netlify.com/configure-builds/environment-variables/ +# --- + +# The deploy context (dev, branch-deploy, deploy-preview, or production) +# @type=enum(dev, branch-deploy, deploy-preview, production) +# @optional +# @docs(Netlify deploy contexts, https://docs.netlify.com/site-deploys/overview/#deploy-contexts) +CONTEXT= + +# Indicator that the build is running on Netlify +# @type=boolean +# @optional +# @docs(Netlify build metadata, https://docs.netlify.com/configure-builds/environment-variables/#build-metadata) +NETLIFY= + +# Unique ID for the Netlify build +# @type=string +# @optional +# @example=5d4aeac2ccabf517d2f219b8 +# @docs(Netlify build metadata, https://docs.netlify.com/configure-builds/environment-variables/#build-metadata) +BUILD_ID= + +# URL for the linked Git repository +# @type=string +# @optional +# @docs(Netlify git metadata, https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +REPOSITORY_URL= + +# Name of the current branch being built +# @type=string +# @optional +# @docs(Netlify git metadata, https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +BRANCH= + +# Name of the head branch received from a Git provider +# @type=string +# @optional +# @docs(Netlify git metadata, https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +HEAD= + +# Reference ID (SHA) of the commit being built +# @type=string +# @optional +# @docs(Netlify git metadata, https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +COMMIT_REF= + +# Reference ID (SHA) of the last commit that was built before the current build +# When a build runs without cache, it will be the same as the COMMIT_REF +# @type=string +# @optional +# @docs(Netlify git metadata, https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +CACHED_COMMIT_REF= + +# Whether the build is from a pull/merge request +# @type=boolean +# @optional +# @docs(Netlify git metadata, 
https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +PULL_REQUEST= + +# ID of the pull/merge request and the Deploy Preview it generated +# For example, deploy-preview-12 is for PR #12 in your repository +# @type=string +# @optional +# @docs(Netlify git metadata, https://docs.netlify.com/configure-builds/environment-variables/#git-metadata) +REVIEW_ID= + +# URL representing the main address to your site (Netlify subdomain or custom domain) +# @type=url +# @optional +# @example=https://petsof.netlify.app +# @docs(Netlify deploy URLs, https://docs.netlify.com/configure-builds/environment-variables/#deploy-urls-and-metadata) +URL= + +# URL representing the unique URL for an individual deploy (starts with a unique ID) +# @type=string +# @optional +# @example=https://5b243e66dd6a547b4fee73ae--petsof.netlify.app +# @docs(Netlify deploy URLs, https://docs.netlify.com/configure-builds/environment-variables/#deploy-urls-and-metadata) +DEPLOY_URL= + +# URL representing the primary URL for an individual deploy or group of deploys +# @type=string +# @optional +# @example=https://feature-branch--petsof.netlify.app +# @docs(Netlify deploy URLs, https://docs.netlify.com/configure-builds/environment-variables/#deploy-urls-and-metadata) +DEPLOY_PRIME_URL= + +# Unique ID for the specific deploy +# @type=string +# @optional +# @docs(Netlify deploy URLs, https://docs.netlify.com/configure-builds/environment-variables/#deploy-urls-and-metadata) +DEPLOY_ID= + +# Name of the site (also the Netlify subdomain) +# @type=string +# @optional +# @docs(Netlify deploy URLs, https://docs.netlify.com/configure-builds/environment-variables/#deploy-urls-and-metadata) +SITE_NAME= + +# Unique ID for the Netlify site +# @type=string +# @optional +# @docs(Netlify deploy URLs, https://docs.netlify.com/configure-builds/environment-variables/#deploy-urls-and-metadata) +SITE_ID= diff --git a/public-schemas/platforms/.env.vercel b/public-schemas/platforms/.env.vercel new file mode 
100644 index 00000000..1fca31d8 --- /dev/null +++ b/public-schemas/platforms/.env.vercel @@ -0,0 +1,130 @@ +# Vercel System Environment Variables +# @see https://vercel.com/docs/projects/environment-variables/system-environment-variables +# --- + +# An indicator to show that System Environment Variables have been exposed to your project's Deployments +# @type=boolean +# @optional +# @docs(Vercel system env vars, https://vercel.com/docs/projects/environment-variables/system-environment-variables) +VERCEL= + +# An indicator that the code is running in a Continuous Integration environment (build-time only) +# @type=boolean +# @optional +CI= + +# The current deployment environment - development, preview, or production +# @type=enum(development, preview, production) +# @optional +# @docs(Vercel deployment environments, https://vercel.com/docs/deployments/environments) +VERCEL_ENV= + +# The domain name of the generated deployment URL (does not include the protocol scheme https://) +# NOTE: This Variable cannot be used in conjunction with Standard Deployment Protection +# @type=string +# @optional +# @example=*.vercel.app +VERCEL_URL= + +# The domain name of the generated Git branch URL (does not include the protocol scheme https://) +# @type=string +# @optional +# @example=*-git-*.vercel.app +# @docs(Vercel generated URLs, https://vercel.com/docs/deployments/generated-urls#url-with-git-branch) +VERCEL_BRANCH_URL= + +# A production domain name of the project, useful for generating links that point to production such as OG-image URLs +# Selects the shortest production custom domain, or vercel.app domain if no custom domain is available +# The value does not include the protocol scheme https:// +# @type=string +# @optional +VERCEL_PROJECT_PRODUCTION_URL= + +# The ID of the Region where the app is running (run-time only) +# @type=string +# @optional +# @example=cdg1 +# @docs(Vercel edge network regions, https://vercel.com/docs/edge-network/regions) +VERCEL_REGION= + +# The 
unique identifier for the deployment, which can be used to implement Skew Protection +# @type=string +# @optional +# @example=dpl_7Gw5ZMBpQA8h9GF832KGp7nwbuh3 +VERCEL_DEPLOYMENT_ID= + +# When Skew Protection is enabled in Project Settings, this value is set to 1 +# @type=boolean +# @optional +VERCEL_SKEW_PROTECTION_ENABLED= + +# The Protection Bypass for Automation value, if the secret has been generated in the project's Deployment Protection settings +# @type=string +# @sensitive +# @optional +VERCEL_AUTOMATION_BYPASS_SECRET= + +# The Git Provider the deployment is triggered from +# @type=string +# @optional +# @example=github +VERCEL_GIT_PROVIDER= + +# The origin repository the deployment is triggered from +# @type=string +# @optional +VERCEL_GIT_REPO_SLUG= + +# The account that owns the repository the deployment is triggered from +# @type=string +# @optional +VERCEL_GIT_REPO_OWNER= + +# The ID of the repository the deployment is triggered from +# @type=string +# @optional +# @example=117716146 +VERCEL_GIT_REPO_ID= + +# The git branch of the commit the deployment was triggered by +# @type=string +# @optional +# @example=improve-about-page +VERCEL_GIT_COMMIT_REF= + +# The git SHA of the commit the deployment was triggered by +# @type=string +# @optional +# @example=fa1eade47b73733d6312d5abfad33ce9e4068081 +VERCEL_GIT_COMMIT_SHA= + +# The message attached to the commit the deployment was triggered by +# @type=string +# @optional +# @example=Update about page +VERCEL_GIT_COMMIT_MESSAGE= + +# The username attached to the author of the commit that the project was deployed by +# @type=string +# @optional +# @example=johndoe +VERCEL_GIT_COMMIT_AUTHOR_LOGIN= + +# The name attached to the author of the commit that the project was deployed by +# @type=string +# @optional +# @example=John Doe +VERCEL_GIT_COMMIT_AUTHOR_NAME= + +# The git SHA of the last successful deployment for the project and branch (build-time only) +# NOTE: This Variable is only exposed when an Ignored 
Build Step is provided +# @type=string +# @optional +# @example=fa1eade47b73733d6312d5abfad33ce9e4068080 +VERCEL_GIT_PREVIOUS_SHA= + +# The pull request id the deployment was triggered by +# If a deployment is created on a branch before a pull request is made, this value will be an empty string +# @type=string +# @optional +VERCEL_GIT_PULL_REQUEST_ID= From b64374bfef65b2eb910e06d73f187394c2542bc3 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 2 Apr 2026 20:50:08 +0000 Subject: [PATCH 2/5] fix: lint errors and update import documentation - Fix max-len, unused vars, no-use-before-define lint issues - Add documentation for public-schemas and plugin-schema import protocols - Add security restrictions documentation Agent-Logs-Url: https://github.com/dmno-dev/varlock/sessions/e612b56c-9a06-4feb-867c-370c7a82ea6f Co-authored-by: theoephraim <1158956+theoephraim@users.noreply.github.com> --- .../src/content/docs/guides/import.mdx | 49 ++++++++++++- .../varlock/src/env-graph/lib/data-source.ts | 3 +- .../src/env-graph/lib/plugin-schema.ts | 68 +++++++++---------- .../src/env-graph/test/remote-import.test.ts | 6 +- packages/varlock/src/lib/schema-cache.ts | 5 +- .../varlock/src/lib/test/schema-cache.test.ts | 4 +- 6 files changed, 94 insertions(+), 41 deletions(-) diff --git a/packages/varlock-website/src/content/docs/guides/import.mdx b/packages/varlock-website/src/content/docs/guides/import.mdx index 6226a30d..6c8289bb 100644 --- a/packages/varlock-website/src/content/docs/guides/import.mdx +++ b/packages/varlock-website/src/content/docs/guides/import.mdx @@ -11,12 +11,12 @@ The [`@import()` root decorator](/reference/root-decorators/#import) allows you # @import(./env-dir/) # import directory # @import(./.env.partial, KEY1, KEY2) # import specific keys # @import(~/.env.shared) # import from home directory +# @import(public-schemas:platforms/vercel) # import public schema ``` ## Import source types The first 
argument to `@import()` specifies where to look for file(s) to import. -Currently only local file imports are supported, but we plan to support importing over http in a style similar to Deno's http imports. For now, all imported files must be `.env` files (and may contain @env-spec decorators), but in the future, we may also support other formats (e.g., JSON, YAML, etc.) or even JS/TS files. @@ -68,6 +68,53 @@ A registry (npm, jsr, etc.), package name, version, and path can be used to impo ``` */} +### Public schemas + +Varlock maintains a collection of pre-built schemas for popular platforms and services. Import them using the `public-schemas:` protocol. + +- Schema is fetched from GitHub and cached locally (in `~/.config/varlock/schemas-cache/`) +- Cache is refreshed automatically after 24 hours +- Use `varlock cache clear schemas` to force a refresh + +**Available platform schemas:** + +| Schema | Protocol path | Description | +|--------|--------------|-------------| +| Vercel | `public-schemas:platforms/vercel` | Vercel system environment variables | +| Netlify | `public-schemas:platforms/netlify` | Netlify build environment variables | +| Cloudflare Pages | `public-schemas:platforms/cloudflare-pages` | Cloudflare Pages build variables | +| Cloudflare Wrangler | `public-schemas:platforms/cloudflare-wrangler` | Cloudflare Wrangler system variables | + +```env-spec +# @import(public-schemas:platforms/vercel) +``` + +:::tip[Partial imports] +You can import only specific variables from a public schema: + +```env-spec +# @import(public-schemas:platforms/vercel, VERCEL_ENV, VERCEL_URL) +``` +::: + +### Plugin schemas + +If you have a Varlock plugin installed that provides a schema, you can import it using the `plugin-schema:` protocol. 
+ +- Looks for an `env-schema` field in the plugin's `package.json`, or falls back to a `.env.schema` file in the package root +- The plugin must be installed in your project's `node_modules` + +```env-spec +# @import(plugin-schema:@varlock/1password-plugin) +``` + +:::caution[Security] +Remotely imported schemas (via `public-schemas:` or `plugin-schema:`) have security restrictions: +- They **cannot** install plugins (`@plugin` is not allowed) +- They **cannot** import local files (`./`, `../`, `~/`, or absolute paths) +- They can only define schema information (types, descriptions, validation rules) +::: + ## Partial imports By default, all items will be imported, but you may add a list of specific keys to import as additional args after the first. diff --git a/packages/varlock/src/env-graph/lib/data-source.ts b/packages/varlock/src/env-graph/lib/data-source.ts index cd8471e2..d2dc3545 100644 --- a/packages/varlock/src/env-graph/lib/data-source.ts +++ b/packages/varlock/src/env-graph/lib/data-source.ts @@ -506,7 +506,8 @@ export abstract class EnvGraphDataSource { } try { // eslint-disable-next-line no-use-before-define - const schemaSource = await resolvePluginSchema(pluginName, this instanceof FileBasedDataSource ? this : undefined); + const fileSource = this instanceof FileBasedDataSource ? 
this : undefined; + const schemaSource = await resolvePluginSchema(pluginName, fileSource); if (!schemaSource) { if (allowMissing) continue; this._loadingError = new Error(`Plugin "${pluginName}" does not expose a schema file`); diff --git a/packages/varlock/src/env-graph/lib/plugin-schema.ts b/packages/varlock/src/env-graph/lib/plugin-schema.ts index 1f26179e..38bcd695 100644 --- a/packages/varlock/src/env-graph/lib/plugin-schema.ts +++ b/packages/varlock/src/env-graph/lib/plugin-schema.ts @@ -6,6 +6,40 @@ import { getWorkspaceInfo } from '../../lib/workspace-utils'; // Lazy import to avoid circular dependency import type { FileBasedDataSource, EnvGraphDataSource } from './data-source'; +/** + * Finds the schema file in a plugin package. + * + * Checks: + * 1. "env-schema" field in package.json (points to a file relative to package root) + * 2. .env.schema file in the package root + */ +async function findPluginSchemaFile(packageDir: string): Promise { + const pkgJsonPath = path.join(packageDir, 'package.json'); + + try { + const pkgJsonContent = await fs.readFile(pkgJsonPath, 'utf-8'); + const pkgJson = JSON.parse(pkgJsonContent); + + // Check for explicit "env-schema" field + if (pkgJson['env-schema']) { + const schemaFilePath = path.resolve(packageDir, pkgJson['env-schema']); + if (await pathExists(schemaFilePath)) { + return schemaFilePath; + } + } + } catch { + // No package.json or couldn't read it + } + + // Fallback: look for .env.schema in the package root + const defaultSchemaPath = path.join(packageDir, '.env.schema'); + if (await pathExists(defaultSchemaPath)) { + return defaultSchemaPath; + } + + return undefined; +} + /** * Resolves a schema file (.env.schema) from an installed plugin package. * @@ -56,37 +90,3 @@ export async function resolvePluginSchema( throw new Error(`Plugin package "${pluginName}" not found in node_modules`); } - -/** - * Finds the schema file in a plugin package. - * - * Checks: - * 1. 
"env-schema" field in package.json (points to a file relative to package root) - * 2. .env.schema file in the package root - */ -async function findPluginSchemaFile(packageDir: string): Promise { - const pkgJsonPath = path.join(packageDir, 'package.json'); - - try { - const pkgJsonContent = await fs.readFile(pkgJsonPath, 'utf-8'); - const pkgJson = JSON.parse(pkgJsonContent); - - // Check for explicit "env-schema" field - if (pkgJson['env-schema']) { - const schemaFilePath = path.resolve(packageDir, pkgJson['env-schema']); - if (await pathExists(schemaFilePath)) { - return schemaFilePath; - } - } - } catch { - // No package.json or couldn't read it - } - - // Fallback: look for .env.schema in the package root - const defaultSchemaPath = path.join(packageDir, '.env.schema'); - if (await pathExists(defaultSchemaPath)) { - return defaultSchemaPath; - } - - return undefined; -} diff --git a/packages/varlock/src/env-graph/test/remote-import.test.ts b/packages/varlock/src/env-graph/test/remote-import.test.ts index f8f53994..a9a519e2 100644 --- a/packages/varlock/src/env-graph/test/remote-import.test.ts +++ b/packages/varlock/src/env-graph/test/remote-import.test.ts @@ -1,8 +1,10 @@ -import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import { + describe, test, expect, vi, beforeEach, afterEach, +} from 'vitest'; import outdent from 'outdent'; import path from 'node:path'; import { - EnvGraph, DirectoryDataSource, DotEnvFileDataSource, + EnvGraph, DirectoryDataSource, } from '../../index'; // Helper similar to envFilesTest but for testing graph loading diff --git a/packages/varlock/src/lib/schema-cache.ts b/packages/varlock/src/lib/schema-cache.ts index e09f05f2..b1d3bbce 100644 --- a/packages/varlock/src/lib/schema-cache.ts +++ b/packages/varlock/src/lib/schema-cache.ts @@ -63,7 +63,9 @@ function fetchUrl(url: string): Promise { return; } let data = ''; - res.on('data', (chunk) => { data += chunk; }); + res.on('data', (chunk) => { + data += 
chunk; + }); res.on('end', () => resolve(data)); res.on('error', reject); }).on('error', reject); @@ -101,7 +103,6 @@ export async function fetchPublicSchema(schemaPath: string): Promise { } // Fetch from GitHub - const url = `${PUBLIC_SCHEMAS_GITHUB_BASE}/${schemaPath.replace(/^\//, '')}`; // The actual file on disk is named .env., so we need to construct the URL properly // schemaPath is like "platforms/vercel" -> fetch "platforms/.env.vercel" const parts = schemaPath.split('/'); diff --git a/packages/varlock/src/lib/test/schema-cache.test.ts b/packages/varlock/src/lib/test/schema-cache.test.ts index 59a7689e..38646250 100644 --- a/packages/varlock/src/lib/test/schema-cache.test.ts +++ b/packages/varlock/src/lib/test/schema-cache.test.ts @@ -1,4 +1,6 @@ -import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import { + describe, test, expect, vi, beforeEach, afterEach, +} from 'vitest'; import fs from 'node:fs/promises'; import path from 'node:path'; import os from 'node:os'; From 602e6b7e2ec99f8dc704908fe43ef85554d2970f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 2 Apr 2026 20:52:41 +0000 Subject: [PATCH 3/5] chore: address code review feedback and add changeset - Add comment explaining GitHub branch choice for public schemas URL - Add comment explaining hash truncation in cache - Improve synthetic path sanitization for remote imports - Include cache target in error messages - Add changeset for minor version bump Agent-Logs-Url: https://github.com/dmno-dev/varlock/sessions/e612b56c-9a06-4feb-867c-370c7a82ea6f Co-authored-by: theoephraim <1158956+theoephraim@users.noreply.github.com> --- .changeset/remote-imports-support.md | 13 +++++++++++++ packages/varlock/src/cli/commands/cache.command.ts | 2 +- packages/varlock/src/env-graph/lib/data-source.ts | 5 +++-- packages/varlock/src/lib/schema-cache.ts | 2 ++ 4 files changed, 19 insertions(+), 3 deletions(-) create mode 100644 
.changeset/remote-imports-support.md diff --git a/.changeset/remote-imports-support.md b/.changeset/remote-imports-support.md new file mode 100644 index 00000000..7580f5a4 --- /dev/null +++ b/.changeset/remote-imports-support.md @@ -0,0 +1,13 @@ +--- +"varlock": minor +--- + +Add remote import support for `@import()` decorator with two new protocols: + +- `public-schemas:` - Import pre-built schemas for popular platforms (Vercel, Netlify, Cloudflare) from the varlock repository, with local caching +- `plugin-schema:` - Import schema files from installed plugin packages + +Also adds: +- `varlock cache clear` CLI command to manage cached schemas and plugins +- Security restrictions for remote imports (no plugin installation, no local file access) +- Public schemas for Vercel, Netlify, Cloudflare Pages, and Cloudflare Wrangler diff --git a/packages/varlock/src/cli/commands/cache.command.ts b/packages/varlock/src/cli/commands/cache.command.ts index 7b0f84c0..662d7b41 100644 --- a/packages/varlock/src/cli/commands/cache.command.ts +++ b/packages/varlock/src/cli/commands/cache.command.ts @@ -55,6 +55,6 @@ export const commandFn: TypedGunshiCommandFn = async (ctx) = } } catch (error) { console.error('Failed to clear cache:', error); - throw new CliExitError('Failed to clear cache', { forceExit: true }); + throw new CliExitError(`Failed to clear ${cacheTarget} cache`, { forceExit: true }); } }; diff --git a/packages/varlock/src/env-graph/lib/data-source.ts b/packages/varlock/src/env-graph/lib/data-source.ts index d2dc3545..e85c93d9 100644 --- a/packages/varlock/src/env-graph/lib/data-source.ts +++ b/packages/varlock/src/env-graph/lib/data-source.ts @@ -485,8 +485,9 @@ export abstract class EnvGraphDataSource { } try { const contents = await fetchPublicSchema(schemaPath); - // Use a synthetic path for the data source - const syntheticPath = `.env.public-schema-${schemaPath.replace(/\//g, '-')}`; + // Sanitize the schema path for use as a synthetic filename + const safeName 
= schemaPath.replace(/[^a-zA-Z0-9_-]/g, '-'); + const syntheticPath = `.env.public-schema-${safeName}`; // eslint-disable-next-line no-use-before-define const source = new DotEnvFileDataSource(syntheticPath, { overrideContents: contents }); await this.addChild(source, { diff --git a/packages/varlock/src/lib/schema-cache.ts b/packages/varlock/src/lib/schema-cache.ts index b1d3bbce..4c4ae623 100644 --- a/packages/varlock/src/lib/schema-cache.ts +++ b/packages/varlock/src/lib/schema-cache.ts @@ -5,6 +5,7 @@ import crypto from 'node:crypto'; import { getUserVarlockDir } from '../../lib/user-config-dir'; // GitHub raw content base URL for the varlock repo public-schemas +// Uses the `main` branch — schemas are versioned alongside the varlock release const PUBLIC_SCHEMAS_GITHUB_BASE = 'https://raw.githubusercontent.com/dmno-dev/varlock/main/public-schemas'; // Default TTL for cached schemas (24 hours in milliseconds) @@ -73,6 +74,7 @@ function fetchUrl(url: string): Promise { } function hashContent(content: string): string { + // Truncate to 16 hex chars — sufficient for cache invalidation while keeping filenames short return crypto.createHash('sha256').update(content).digest('hex').slice(0, 16); } From f96fea33839ac6b3cc991f5d35714fd5843e4037 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 7 Apr 2026 05:57:29 +0000 Subject: [PATCH 4/5] fix: handle missing home directory and improve plugin-schema imports - getUserVarlockDir() falls back to os.tmpdir() when HOME is unavailable - plugin-schema: uses .env.schema export from package.json exports map - plugin-schema: supports loading specific files (e.g., plugin-schema:@pkg/.env.custom) - Update import documentation for new plugin-schema syntax Agent-Logs-Url: https://github.com/dmno-dev/varlock/sessions/d6d84ed7-8a55-44f0-9827-236fc1a7b1ce Co-authored-by: theoephraim <1158956+theoephraim@users.noreply.github.com> --- .../src/content/docs/guides/import.mdx | 4 +- 
.../varlock/src/env-graph/lib/data-source.ts | 12 ++- .../src/env-graph/lib/plugin-schema.ts | 99 +++++++++++++++---- packages/varlock/src/lib/user-config-dir.ts | 15 ++- 4 files changed, 99 insertions(+), 31 deletions(-) diff --git a/packages/varlock-website/src/content/docs/guides/import.mdx b/packages/varlock-website/src/content/docs/guides/import.mdx index 6c8289bb..6db8933a 100644 --- a/packages/varlock-website/src/content/docs/guides/import.mdx +++ b/packages/varlock-website/src/content/docs/guides/import.mdx @@ -101,11 +101,13 @@ You can import only specific variables from a public schema: If you have a Varlock plugin installed that provides a schema, you can import it using the `plugin-schema:` protocol. -- Looks for an `env-schema` field in the plugin's `package.json`, or falls back to a `.env.schema` file in the package root +- Looks for a `.env.schema` export in the plugin's `package.json` exports map, or falls back to a `.env.schema` file in the package root - The plugin must be installed in your project's `node_modules` +- You can specify a different file path if the plugin exposes multiple schema files ```env-spec # @import(plugin-schema:@varlock/1password-plugin) +# @import(plugin-schema:@varlock/1password-plugin/.env.connect) ``` :::caution[Security] diff --git a/packages/varlock/src/env-graph/lib/data-source.ts b/packages/varlock/src/env-graph/lib/data-source.ts index e85c93d9..136129e0 100644 --- a/packages/varlock/src/env-graph/lib/data-source.ts +++ b/packages/varlock/src/env-graph/lib/data-source.ts @@ -500,18 +500,20 @@ export abstract class EnvGraphDataSource { } } else if (importPath.startsWith('plugin-schema:')) { // Import schema from an installed plugin package - const pluginName = importPath.slice('plugin-schema:'.length); - if (!pluginName) { + // Supports: plugin-schema:@scope/name (defaults to .env.schema) + // plugin-schema:@scope/name/.env.custom (specific file) + const pluginDescriptor = importPath.slice('plugin-schema:'.length); + 
if (!pluginDescriptor) { this._loadingError = new Error('plugin-schema: import must specify a plugin name'); return; } try { // eslint-disable-next-line no-use-before-define const fileSource = this instanceof FileBasedDataSource ? this : undefined; - const schemaSource = await resolvePluginSchema(pluginName, fileSource); + const schemaSource = await resolvePluginSchema(pluginDescriptor, fileSource); if (!schemaSource) { if (allowMissing) continue; - this._loadingError = new Error(`Plugin "${pluginName}" does not expose a schema file`); + this._loadingError = new Error(`Plugin "${pluginDescriptor}" does not expose the requested schema file`); return; } await this.addChild(schemaSource, { @@ -519,7 +521,7 @@ export abstract class EnvGraphDataSource { }); } catch (pluginErr) { if (allowMissing) continue; - this._loadingError = new Error(`Failed to resolve plugin schema "${pluginName}": ${(pluginErr as Error).message}`); + this._loadingError = new Error(`Failed to resolve plugin schema "${pluginDescriptor}": ${(pluginErr as Error).message}`); return; } } else if (importPath.startsWith('http://') || importPath.startsWith('https://')) { diff --git a/packages/varlock/src/env-graph/lib/plugin-schema.ts b/packages/varlock/src/env-graph/lib/plugin-schema.ts index 38bcd695..1a8b9fa2 100644 --- a/packages/varlock/src/env-graph/lib/plugin-schema.ts +++ b/packages/varlock/src/env-graph/lib/plugin-schema.ts @@ -7,32 +7,45 @@ import { getWorkspaceInfo } from '../../lib/workspace-utils'; import type { FileBasedDataSource, EnvGraphDataSource } from './data-source'; /** - * Finds the schema file in a plugin package. + * Finds a specific file in a plugin package by checking the exports map. * - * Checks: - * 1. "env-schema" field in package.json (points to a file relative to package root) - * 2. .env.schema file in the package root + * Looks for a `.env.schema` export (or the given subpath) in the plugin's + * `package.json` exports field, then falls back to checking the file on disk. 
+ * + * @param packageDir - Absolute path to the plugin package root + * @param filePath - The file subpath to look for (e.g., `.env.schema` or `.env.custom`) */ -async function findPluginSchemaFile(packageDir: string): Promise { +async function findPluginSchemaFile( + packageDir: string, + filePath: string, +): Promise { const pkgJsonPath = path.join(packageDir, 'package.json'); try { const pkgJsonContent = await fs.readFile(pkgJsonPath, 'utf-8'); const pkgJson = JSON.parse(pkgJsonContent); - // Check for explicit "env-schema" field - if (pkgJson['env-schema']) { - const schemaFilePath = path.resolve(packageDir, pkgJson['env-schema']); - if (await pathExists(schemaFilePath)) { - return schemaFilePath; + // Check for the file in the exports map (e.g., "./.env.schema": "./dist/.env.schema") + const exportKey = `./${filePath}`; + if (pkgJson.exports?.[exportKey]) { + const exportTarget = pkgJson.exports[exportKey]; + // exports can be a string or { default: string } + const resolvedExport = typeof exportTarget === 'string' + ? 
exportTarget + : exportTarget?.default; + if (resolvedExport) { + const schemaFilePath = path.resolve(packageDir, resolvedExport); + if (await pathExists(schemaFilePath)) { + return schemaFilePath; + } } } } catch { // No package.json or couldn't read it } - // Fallback: look for .env.schema in the package root - const defaultSchemaPath = path.join(packageDir, '.env.schema'); + // Fallback: look for the file directly in the package root + const defaultSchemaPath = path.join(packageDir, filePath); if (await pathExists(defaultSchemaPath)) { return defaultSchemaPath; } @@ -41,22 +54,66 @@ async function findPluginSchemaFile(packageDir: string): Promise { // Lazy import to avoid circular dependency at module level const { DotEnvFileDataSource } = await import('./data-source'); + const { packageName, filePath } = parsePluginSchemaDescriptor(descriptor); + const workspaceRootPath = getWorkspaceInfo()?.rootPath; // Start from the file data source's directory, or cwd @@ -70,10 +127,10 @@ export async function resolvePluginSchema( // Walk up the directory tree looking for the plugin in node_modules let currentDir = startDir; while (currentDir) { - const candidatePath = path.join(currentDir, 'node_modules', pluginName); + const candidatePath = path.join(currentDir, 'node_modules', packageName); if (await pathExists(candidatePath)) { - // Found the plugin package - look for its schema - const schemaPath = await findPluginSchemaFile(candidatePath); + // Found the plugin package - look for its schema file + const schemaPath = await findPluginSchemaFile(candidatePath, filePath); if (schemaPath) { return new DotEnvFileDataSource(schemaPath); } @@ -88,5 +145,5 @@ export async function resolvePluginSchema( currentDir = parentDir; } - throw new Error(`Plugin package "${pluginName}" not found in node_modules`); + throw new Error(`Plugin package "${packageName}" not found in node_modules`); } diff --git a/packages/varlock/src/lib/user-config-dir.ts 
b/packages/varlock/src/lib/user-config-dir.ts index 8c9235e8..46e432a6 100644 --- a/packages/varlock/src/lib/user-config-dir.ts +++ b/packages/varlock/src/lib/user-config-dir.ts @@ -7,19 +7,26 @@ import { existsSync } from 'node:fs'; * * Resolution order: * 1. If `$XDG_CONFIG_HOME` is set → `$XDG_CONFIG_HOME/varlock` - * 2. If legacy `~/.varlock` exists → `~/.varlock` (backwards compatibility) - * 3. Otherwise → `~/.config/varlock` (XDG default) + * 2. If home directory is available and legacy `~/.varlock` exists → `~/.varlock` (backwards compatibility) + * 3. If home directory is available → `~/.config/varlock` (XDG default) + * 4. Otherwise → `$TMPDIR/varlock` (fallback when no home directory) * * @see https://specifications.freedesktop.org/basedir/latest/ */ export function getUserVarlockDir(): string { - const home = os.homedir(); - // If XDG_CONFIG_HOME is explicitly set, always respect it if (process.env.XDG_CONFIG_HOME) { return join(process.env.XDG_CONFIG_HOME, 'varlock'); } + const home = os.homedir(); + + // If no home directory is available (e.g., some CI/container environments), + // fall back to a temp directory so caches still work + if (!home) { + return join(os.tmpdir(), 'varlock'); + } + // Backwards compatibility: if legacy ~/.varlock exists, keep using it const legacyDir = join(home, '.varlock'); if (existsSync(legacyDir)) { From 8ea2793c8fb16c60f86c1bb5073b648fa052add4 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 7 Apr 2026 05:59:10 +0000 Subject: [PATCH 5/5] chore: update changeset to reflect plugin-schema and homedir improvements Agent-Logs-Url: https://github.com/dmno-dev/varlock/sessions/d6d84ed7-8a55-44f0-9827-236fc1a7b1ce Co-authored-by: theoephraim <1158956+theoephraim@users.noreply.github.com> --- .changeset/remote-imports-support.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.changeset/remote-imports-support.md 
b/.changeset/remote-imports-support.md index 7580f5a4..c2c6066a 100644 --- a/.changeset/remote-imports-support.md +++ b/.changeset/remote-imports-support.md @@ -5,9 +5,10 @@ Add remote import support for `@import()` decorator with two new protocols: - `public-schemas:` - Import pre-built schemas for popular platforms (Vercel, Netlify, Cloudflare) from the varlock repository, with local caching -- `plugin-schema:` - Import schema files from installed plugin packages +- `plugin-schema:` - Import schema files from installed plugin packages, with support for loading specific files (e.g., `plugin-schema:@varlock/1password-plugin/.env.connect`) Also adds: - `varlock cache clear` CLI command to manage cached schemas and plugins - Security restrictions for remote imports (no plugin installation, no local file access) - Public schemas for Vercel, Netlify, Cloudflare Pages, and Cloudflare Wrangler +- Graceful fallback to temp directory when user home directory is unavailable