From 644e523c94a83828de4d032a0de6d699940fea81 Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Tue, 13 Jan 2026 17:45:49 +0100 Subject: [PATCH 1/6] aitools: add skills install command for Claude Code Add `databricks experimental aitools skills` subcommand: - `skills list` - list available skills - `skills install` - install all skills to ~/.claude/skills/ Includes databricks-apps skill with reference docs synced from the appkit template. --- experimental/aitools/cmd/aitools.go | 1 + experimental/aitools/cmd/skills.go | 140 +++++++++++++ .../lib/agent_skills/databricks-apps/SKILL.md | 94 +++++++++ .../databricks-apps/references/appkit-sdk.md | 86 ++++++++ .../references/authentication.md | 52 +++++ .../databricks-apps/references/frontend.md | 108 ++++++++++ .../databricks-apps/references/sql-queries.md | 195 ++++++++++++++++++ .../databricks-apps/references/testing.md | 58 ++++++ .../databricks-apps/references/trpc.md | 95 +++++++++ .../aitools/lib/agent_skills/embed.go | 8 + 10 files changed, 837 insertions(+) create mode 100644 experimental/aitools/cmd/skills.go create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md create mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md create mode 100644 experimental/aitools/lib/agent_skills/embed.go diff --git a/experimental/aitools/cmd/aitools.go b/experimental/aitools/cmd/aitools.go index e467db5cd0..a3deaf0c29 100644 --- a/experimental/aitools/cmd/aitools.go +++ b/experimental/aitools/cmd/aitools.go @@ -28,6 +28,7 @@ Provides commands to: cmd.AddCommand(newMcpCmd()) cmd.AddCommand(newInstallCmd()) + cmd.AddCommand(newSkillsCmd()) cmd.AddCommand(newToolsCmd()) return cmd diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go new file mode 100644 index 0000000000..9284a43ef3 --- /dev/null +++ b/experimental/aitools/cmd/skills.go @@ -0,0 +1,140 @@ +package mcp + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + + "github.com/databricks/cli/experimental/aitools/lib/agent_skills" + "github.com/databricks/cli/libs/cmdio" + "github.com/fatih/color" + "github.com/spf13/cobra" +) + +func newSkillsCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "skills", + Short: "Manage Databricks skills for Claude Code", + Long: `Manage Databricks skills that can be installed to ~/.claude/skills/ for use with Claude Code.`, + } + + cmd.AddCommand(newSkillsListCmd()) + cmd.AddCommand(newSkillsInstallCmd()) + + return cmd +} + +func newSkillsListCmd() *cobra.Command { + return &cobra.Command{ + Use: "list", + Short: "List available skills", + RunE: func(cmd *cobra.Command, args []string) error { + return listSkills(cmd.Context()) + }, + } +} + +func newSkillsInstallCmd() *cobra.Command { + return &cobra.Command{ + Use: "install", + Short: "Install all Databricks skills for Claude Code", + Long: `Install all Databricks skills to ~/.claude/skills/ for use with Claude Code.`, + RunE: func(cmd *cobra.Command, args []string) error { + return installAllSkills(cmd.Context()) + }, + 
} +} + +func getSkillNames() ([]string, error) { + entries, err := fs.ReadDir(agent_skills.SkillsFS, ".") + if err != nil { + return nil, fmt.Errorf("failed to read skills: %w", err) + } + + var names []string + for _, entry := range entries { + if entry.IsDir() { + names = append(names, entry.Name()) + } + } + return names, nil +} + +func listSkills(ctx context.Context) error { + names, err := getSkillNames() + if err != nil { + return err + } + + cmdio.LogString(ctx, "Available skills:") + cmdio.LogString(ctx, "") + for _, name := range names { + cmdio.LogString(ctx, " "+name) + } + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "Install all with: databricks experimental aitools skills install") + return nil +} + +func installAllSkills(ctx context.Context) error { + names, err := getSkillNames() + if err != nil { + return err + } + + for _, name := range names { + if err := installSkill(ctx, name); err != nil { + return err + } + } + return nil +} + +func installSkill(ctx context.Context, skillName string) error { + skillFS, err := fs.Sub(agent_skills.SkillsFS, skillName) + if err != nil { + return fmt.Errorf("skill %q not found", skillName) + } + + if _, err := fs.Stat(skillFS, "SKILL.md"); err != nil { + return fmt.Errorf("skill %q not found", skillName) + } + + homeDir, err := os.UserHomeDir() + if err != nil { + return fmt.Errorf("failed to get home directory: %w", err) + } + + destDir := filepath.Join(homeDir, ".claude", "skills", skillName) + + if err := os.MkdirAll(destDir, 0o755); err != nil { + return fmt.Errorf("failed to create directory: %w", err) + } + + err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + destPath := filepath.Join(destDir, path) + + if d.IsDir() { + return os.MkdirAll(destPath, 0o755) + } + + content, err := fs.ReadFile(skillFS, path) + if err != nil { + return fmt.Errorf("failed to read %s: %w", path, err) + } + + return os.WriteFile(destPath, content, 0o644) + }) + if err != nil { + return fmt.Errorf("failed to copy skill files: %w", err) + } + + cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir)) + return nil +} diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md b/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md new file mode 100644 index 0000000000..e82a6fcf0f --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md @@ -0,0 +1,94 @@ +--- +name: databricks-apps +description: Build full-stack TypeScript apps on Databricks. Use when asked to create dashboards, data apps, analytics tools, or visualizations that query Databricks SQL. Provides project scaffolding, SQL data access patterns, and deployment commands. Invoke BEFORE starting implementation. +metadata: + version: "0.1.0" + min_cli_version: "0.250.0" +--- + +# Databricks Apps Development + +Build TypeScript apps that query Databricks SQL warehouses and deploy to Databricks Apps. + +## Workflow + +1. **Verify auth**: `databricks auth profiles` +2. **Find warehouse**: `databricks sql warehouses list` +3. **Explore data**: `databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE` +4. **Scaffold project**: `databricks experimental aitools tools init-template --name my-app --description "..."` +5. **Develop**: `cd my-app && npm install && npm run dev` +6. **Validate**: `databricks experimental aitools tools validate ./` +7. 
**Deploy**: `databricks experimental aitools tools deploy` (requires user permission) + +## Data Exploration + +```bash +# list catalogs/schemas/tables +databricks catalogs list +databricks schemas list +databricks tables list + +# discover table schema (columns, types, sample data) +databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE + +# test queries +databricks experimental aitools tools query "SELECT * FROM catalog.schema.table LIMIT 10" +``` + +Note: Use separate arguments for `catalogs/schemas/tables` commands. Dot notation only works in `discover-schema` and `query`. + +## Project Structure + +After scaffolding: +- `server/` - Node.js backend with App Kit and tRPC +- `client/` - React frontend with App Kit hooks +- `config/queries/` - SQL query files +- `shared/` - Shared TypeScript types + +## Adding Visualizations + +**Step 1**: Create SQL file in `config/queries/my_data.sql` +```sql +SELECT category, COUNT(*) as count FROM my_table GROUP BY category +``` + +**Step 2**: Define schema in `config/queries/schema.ts` +```typescript +export const querySchemas = { + my_data: z.array(z.object({ category: z.string(), count: z.number() })), +}; +``` + +**Step 3**: Use visualization component +```typescript +import { BarChart } from '@databricks/appkit-ui/react'; + +``` + +Run `npm run dev` to regenerate types after schema changes. + +## Key References + +Load these when implementing specific features: +- [SQL Queries](references/sql-queries.md) - query files, schemas, parameterization, sql.* helpers +- [AppKit SDK](references/appkit-sdk.md) - imports, server setup, useAnalyticsQuery hook +- [Frontend](references/frontend.md) - visualization components, styling, Radix constraints +- [tRPC](references/trpc.md) - custom endpoints for mutations, Databricks APIs +- [Testing](references/testing.md) - vitest unit tests, Playwright smoke tests +- [Authentication](references/authentication.md) - profiles, OAuth, troubleshooting + +## Critical Rules + +1. **SQL for data retrieval**: Always use `config/queries/` + visualization components. Never use tRPC for SELECT queries. +2. **Numeric types**: All SQL numbers return as strings in JSON. Always convert: `Number(row.amount).toFixed(2)` +3. **Type imports**: Use `import type { ... }` for type-only imports (verbatimModuleSyntax is enabled). +4. **App name**: Must be ≤26 characters (dev- prefix adds 4 chars, max 30 total). +5. **Validate before deploy**: Always run `databricks experimental aitools tools validate ./` first. + +## Decision Tree + +- **Display data from SQL?** + - Chart/Table → Use `BarChart`, `LineChart`, `DataTable` components + - Custom layout (KPIs, cards) → Use `useAnalyticsQuery` hook +- **Call Databricks API?** → Use tRPC (serving endpoints, MLflow, Jobs API) +- **Modify data?** → Use tRPC mutations diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md new file mode 100644 index 0000000000..5ab00768e1 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md @@ -0,0 +1,86 @@ +# Databricks App Kit SDK + +## TypeScript Import Rules + +This template uses strict TypeScript settings with `verbatimModuleSyntax: true`. **Always use `import type` for type-only imports**. + +Template enforces `noUnusedLocals` - remove unused imports immediately or build fails. 
+ +```typescript +// ✅ CORRECT - use import type for types +import type { MyInterface, MyType } from '../../shared/types'; + +// ❌ WRONG - will fail compilation +import { MyInterface, MyType } from '../../shared/types'; +``` + +## Server Setup + +```typescript +import { createApp, server, analytics } from '@databricks/app-kit'; + +const app = await createApp({ + plugins: [ + server({ autoStart: false }), + analytics(), + ], +}); + +// Extend with custom tRPC endpoints if needed +app.server.extend((express: Application) => { + express.use('/trpc', [appRouterMiddleware()]); +}); + +await app.server.start(); +``` + +## useAnalyticsQuery Hook + +**ONLY use when displaying data in a custom way that isn't a chart or table.** + +Use cases: +- Custom HTML layouts (cards, lists, grids) +- Summary statistics and KPIs +- Conditional rendering based on data values +- Data that needs transformation before display + +```typescript +import { useAnalyticsQuery, Skeleton } from '@databricks/app-kit-ui/react'; + +interface QueryResult { column_name: string; value: number; } + +function CustomDisplay() { + const { data, loading, error } = useAnalyticsQuery('query_name', { + start_date: sql.date(Date.now()), + category: sql.string("tools") + }); + + if (loading) return ; + if (error) return
<div>Error: {error}</div>;
+
+  return (
+    <div>
+      {data?.map(row => (
+        <div key={row.column_name}>
+          <div>{row.column_name}</div>
+          <div>{row.value}</div>
+        </div>
+      ))}
+    </div>
+ ); +} +``` + +**API:** + +```typescript +const { data, loading, error } = useAnalyticsQuery( + queryName: string, // SQL file name without .sql extension + params: Record // Query parameters +); +// Returns: { data: T | null, loading: boolean, error: string | null } +``` + +**NOT supported:** +- `enabled` - Query always executes on mount. Use conditional rendering: `{selectedId && }` +- `refetch` - Not available. Re-mount component to re-query. diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md new file mode 100644 index 0000000000..a990b7bf9f --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md @@ -0,0 +1,52 @@ +# Authentication + +## Check Status + +```bash +databricks auth profiles +``` + +## Configure Profile + +```bash +databricks configure --profile +``` + +## OAuth Login + +```bash +databricks auth login --profile --host +``` + +Browser-based OAuth. Recommended for development. + +## Profile Switching + +```bash +# single command +DATABRICKS_CONFIG_PROFILE= databricks + +# or flag +databricks --profile +``` + +## Environment Variables + +| Variable | Purpose | +|----------|---------| +| `DATABRICKS_HOST` | Workspace URL | +| `DATABRICKS_CONFIG_PROFILE` | Profile name | +| `DATABRICKS_WAREHOUSE_ID` | Default warehouse | + +## Troubleshooting + +| Issue | Solution | +|-------|----------| +| No profiles | `databricks configure --profile ` | +| Token expired | `databricks auth login --profile --host ` | +| Wrong workspace | Check `DATABRICKS_CONFIG_PROFILE` or use `--profile` | +| Silent auth fail | `databricks auth profiles` to check status | + +## New Account + +Free account: https://docs.databricks.com/getting-started/free-edition diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md new file mode 100644 index 0000000000..a270b46b9e --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md @@ -0,0 +1,108 @@ +# Frontend Guidelines + +## Visualization Components + +Components from `@databricks/appkit-ui/react` handle data fetching, loading states, and error handling internally. + +Available: `AreaChart`, `BarChart`, `LineChart`, `PieChart`, `RadarChart`, `DataTable` + +**Basic Usage:** + +```typescript +import { BarChart, LineChart, DataTable, Card, CardContent, CardHeader, CardTitle } from '@databricks/appkit-ui/react'; +import { sql } from "@databricks/appkit-ui/js"; + +function MyDashboard() { + return ( +
+    <div className="grid grid-cols-2 gap-4">
+      <Card>
+        <CardHeader>
+          <CardTitle>Sales by Region</CardTitle>
+        </CardHeader>
+        <CardContent>
+          <BarChart queryKey="sales_by_region" />
+        </CardContent>
+      </Card>
+      <Card>
+        <CardHeader>
+          <CardTitle>Revenue Trend</CardTitle>
+        </CardHeader>
+        <CardContent>
+          <LineChart queryKey="revenue_trend" />
+        </CardContent>
+      </Card>
+    </div>
+ ); +} +``` + +Components automatically fetch data, show loading states, display errors, and render with sensible defaults. + +**Custom Visualization (Recharts):** + +```typescript +import { BarChart } from '@databricks/appkit-ui/react'; +import { Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts'; + + + + + + + + + + +``` + +Databricks brand colors: `['#40d1f5', '#4462c9', '#EB1600', '#0B2026', '#4A4A4A', '#353a4a']` + +**❌ Don't double-fetch:** + +```typescript +// WRONG - redundant fetch +const { data } = useAnalyticsQuery('sales_data', {}); +return ; + +// CORRECT - let component handle it +return ; +``` + +## Layout Structure + +```tsx +
+<div className="container mx-auto p-6 space-y-6">
+  <header>
+    <h1 className="text-2xl font-bold">Page Title</h1>
+  </header>
+  <div>{/* form inputs */}</div>
+  <div>{/* list items */}</div>
+</div>
+``` + +## Component Organization + +- Shared UI components: `@databricks/appkit-ui/react` +- Feature components: `client/src/components/FeatureName.tsx` +- Split components when logic exceeds ~100 lines or component is reused + +## Radix UI Constraints + +- `SelectItem` cannot have `value=""`. Use sentinel value like `"all"` for "show all" options. + +## Map Libraries (react-leaflet) + +For maps with React 19, use react-leaflet v5: + +```bash +npm install react-leaflet@^5.0.0 leaflet @types/leaflet +``` + +```typescript +import 'leaflet/dist/leaflet.css'; +``` + +## Best Practices + +- Use shadcn/radix components (Button, Input, Card, etc.) for consistent UI, import them from `@databricks/appkit-ui/react`. +- **Use skeleton loaders**: Always use `` components instead of plain "Loading..." text +- Define result types in `shared/types.ts` for reuse between frontend and backend +- Handle nullable fields: `value={field || ''}` for inputs +- Type callbacks explicitly: `onChange={(e: React.ChangeEvent) => ...}` +- Forms should have loading states: `disabled={isLoading}` +- Show empty states with helpful text when no data exists diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md new file mode 100644 index 0000000000..2db77f0bfb --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md @@ -0,0 +1,195 @@ +# SQL Query Files + +**IMPORTANT**: ALWAYS use SQL files in `config/queries/` for data retrieval. NEVER use tRPC for SQL queries. + +- Store ALL SQL queries in `config/queries/` directory +- Name files descriptively: `trip_statistics.sql`, `user_metrics.sql`, `sales_by_region.sql` +- Reference by filename (without extension) in `useAnalyticsQuery` or directly in a visualization component passing it as `queryKey` +- App Kit automatically executes queries against configured Databricks warehouse +- Benefits: Built-in caching, proper connection pooling, better performance + +## Query Schemas + +Define the shape of QUERY RESULTS (not input parameters) in `config/queries/schema.ts` using Zod schemas. + +- **These schemas validate the COLUMNS RETURNED by SQL queries** +- Input parameters are passed separately to `useAnalyticsQuery()` as the second argument +- Schema field names must match your SQL SELECT column names/aliases + +Example: + +```typescript +import { z } from 'zod'; + +export const querySchemas = { + mocked_sales: z.array( + z.object({ + max_month_num: z.number().min(1).max(12), + }) + ), + + hello_world: z.array( + z.object({ + value: z.string(), + }) + ), +}; +``` + +**IMPORTANT: Refreshing Type Definitions** + +After adding or modifying query schemas in `config/queries/schema.ts`: + +1. **DO NOT** manually edit `client/src/appKitTypes.d.ts` - this file is auto-generated +2. Run `npm run dev` to automatically regenerate the TypeScript type definitions +3. The dev server will scan your SQL files and schema definitions and update `appKitTypes.d.ts` accordingly + +## SQL Type Handling (Critical) + +**ALL numeric values from Databricks SQL are returned as STRINGS in JSON responses.** This includes results from `ROUND()`, `AVG()`, `SUM()`, `COUNT()`, etc. 
Always convert before using numeric methods: + +```typescript +// ❌ WRONG - fails at runtime +{row.total_amount.toFixed(2)} + +// ✅ CORRECT - convert to number first +{Number(row.total_amount).toFixed(2)} +``` + +**Helper Functions:** + +Use the helpers from `shared/types.ts` for consistent formatting: + +```typescript +import { toNumber, formatCurrency, formatPercent } from '../../shared/types'; + +// Convert to number +const amount = toNumber(row.amount); // "123.45" → 123.45 + +// Format as currency +const formatted = formatCurrency(row.amount); // "123.45" → "$123.45" + +// Format as percentage +const percent = formatPercent(row.rate); // "85.5" → "85.5%" +``` + +## Query Parameterization + +SQL queries can accept parameters to make them dynamic and reusable. + +**Key Points:** +- Parameters use colon prefix: `:parameter_name` +- Databricks infers types from values automatically +- For optional string parameters, use pattern: `(:param = '' OR column = :param)` +- **For optional date parameters, use sentinel dates** (`'1900-01-01'` and `'9999-12-31'`) instead of empty strings + +### SQL Parameter Syntax + +```sql +-- config/queries/filtered_data.sql +SELECT * +FROM my_table +WHERE column_value >= :min_value + AND column_value <= :max_value + AND category = :category + AND (:optional_filter = '' OR status = :optional_filter) +``` + +### Frontend Parameter Passing + +```typescript +import { sql } from "@databricks/appkit-ui/js"; + +const { data } = useAnalyticsQuery('filtered_data', { + min_value: sql.number(minValue), + max_value: sql.number(maxValue), + category: sql.string(category), + optional_filter: sql.string(optionalFilter || ''), // empty string for optional params +}); +``` + +### Date Parameters + +Use `sql.date()` for date parameters with `YYYY-MM-DD` format strings. + +**Frontend - Using Date Parameters:** + +```typescript +import { sql } from '@databricks/appkit-ui/js'; +import { useState } from 'react'; + +function MyComponent() { + const [startDate, setStartDate] = useState('2016-02-01'); + const [endDate, setEndDate] = useState('2016-02-29'); + + const queryParams = { + start_date: sql.date(startDate), // Pass YYYY-MM-DD string to sql.date() + end_date: sql.date(endDate), + }; + + const { data } = useAnalyticsQuery('my_query', queryParams); + + // ... +} +``` + +**SQL - Date Filtering:** + +```sql +-- Filter by date range using DATE() function +SELECT COUNT(*) as trip_count +FROM samples.nyctaxi.trips +WHERE DATE(tpep_pickup_datetime) >= :start_date + AND DATE(tpep_pickup_datetime) <= :end_date +``` + +**Date Helper Functions:** + +```typescript +// Helper to get dates relative to today +const daysAgo = (n: number) => { + const date = new Date(Date.now() - n * 86400000); + return sql.date(date) +}; + +const params = { + start_date: daysAgo(7), // 7 days ago + end_date: sql.date(daysAgo(0)), // Today +}; +``` + +### Optional Date Parameters - Use Sentinel Dates + +Databricks App Kit validates parameter types before query execution. **DO NOT use empty strings (`''`) for optional date parameters** as this causes validation errors. 
+ +**✅ CORRECT - Use Sentinel Dates:** + +```typescript +// Frontend: Use sentinel dates for "no filter" instead of empty strings +const revenueParams = { + group_by: 'month', + start_date: sql.date('1900-01-01'), // Sentinel: effectively no lower bound + end_date: sql.date('9999-12-31'), // Sentinel: effectively no upper bound + country: sql.string(country || ''), + property_type: sql.string(propertyType || ''), +}; +``` + +```sql +-- SQL: Simple comparison since sentinel dates are always valid +WHERE b.check_in >= CAST(:start_date AS DATE) + AND b.check_in <= CAST(:end_date AS DATE) +``` + +**Why Sentinel Dates Work:** +- `1900-01-01` is before any real data (effectively no lower bound filter) +- `9999-12-31` is after any real data (effectively no upper bound filter) +- Always valid DATE types, so no parameter validation errors +- All real dates fall within this range, so no filtering occurs + +**Parameter Types Summary:** +- ALWAYS use sql.* helper functions from the `@databricks/appkit-ui/js` package to define SQL parameters +- **Strings/Numbers**: Use directly in SQL with `:param_name` +- **Dates**: Use with `CAST(:param AS DATE)` in SQL +- **Optional Strings**: Use empty string default, check with `(:param = '' OR column = :param)` +- **Optional Dates**: Use sentinel dates (`sql.date('1900-01-01')` and `sql.date('9999-12-31')`) instead of empty strings diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md new file mode 100644 index 0000000000..b1a4fea219 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md @@ -0,0 +1,58 @@ +# Testing Guidelines + +## Unit Tests (Vitest) + +**CRITICAL**: Use vitest for all tests. Put tests next to the code (e.g. src/\*.test.ts) + +```typescript +import { describe, it, expect } from 'vitest'; + +describe('Feature Name', () => { + it('should do something', () => { + expect(true).toBe(true); + }); + + it('should handle async operations', async () => { + const result = await someAsyncFunction(); + expect(result).toBeDefined(); + }); +}); +``` + +**Best Practices:** +- Use `describe` blocks to group related tests +- Use `it` for individual test cases +- Use `expect` for assertions +- Tests run with `npm test` (runs `vitest run`) + +❌ **Do not write unit tests for:** +- SQL files under `config/queries/` - little value in testing static SQL +- Types associated with queries - these are just schema definitions + +## Smoke Test (Playwright) + +The template includes a smoke test at `tests/smoke.spec.ts` that verifies the app loads correctly. 
+ +**What the smoke test does:** +- Opens the app +- Waits for data to load (SQL query results) +- Verifies key UI elements are visible +- Captures screenshots and console logs to `.smoke-test/` directory +- Always captures artifacts, even on test failure + +**When customizing the app**, update `tests/smoke.spec.ts` to match your UI: +- Change heading selector to match your app title (replace 'Minimal Databricks App') +- Update data assertions to match your query results (replace 'hello world' check) +- Keep the test simple - just verify app loads and displays data +- The default test expects specific template content; update these expectations after customization + +**Keep smoke tests simple:** +- Only verify that the app loads and displays initial data +- Wait for key elements to appear (page title, main content) +- Capture artifacts for debugging +- Run quickly (< 5 seconds) + +**For extended E2E tests:** +- Create separate test files in `tests/` directory (e.g., `tests/user-flow.spec.ts`) +- Use `npm run test:e2e` to run all Playwright tests +- Keep complex user flows, interactions, and edge cases out of the smoke test diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md new file mode 100644 index 0000000000..acfb68c1b6 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md @@ -0,0 +1,95 @@ +# tRPC for Custom Endpoints + +**CRITICAL**: Do NOT use tRPC for SQL queries or data retrieval. Use `config/queries/` + `useAnalyticsQuery` instead. + +Use tRPC ONLY for: + +- **Mutations**: Creating, updating, or deleting data (INSERT, UPDATE, DELETE) +- **External APIs**: Calling Databricks APIs (serving endpoints, jobs, MLflow, etc.) +- **Complex business logic**: Multi-step operations that cannot be expressed in SQL +- **File operations**: File uploads, processing, transformations +- **Custom computations**: Operations requiring TypeScript/Node.js logic + +## Server-side Pattern + +```typescript +// server/trpc.ts +import { initTRPC } from '@trpc/server'; +import { getRequestContext } from '@databricks/appkit'; +import { z } from 'zod'; + +const t = initTRPC.create({ transformer: superjson }); +const publicProcedure = t.procedure; + +export const appRouter = t.router({ + // Example: Query a serving endpoint + queryModel: publicProcedure.input(z.object({ prompt: z.string() })).query(async ({ input: { prompt } }) => { + const { serviceDatabricksClient: client } = getRequestContext(); + const response = await client.servingEndpoints.query({ + name: 'your-endpoint-name', + messages: [{ role: 'user', content: prompt }], + }); + return response; + }), + + // Example: Mutation + createRecord: publicProcedure.input(z.object({ name: z.string() })).mutation(async ({ input }) => { + // Custom logic here + return { success: true, id: 123 }; + }), +}); +``` + +## Client-side Pattern + +```typescript +// client/src/components/MyComponent.tsx +import { trpc } from '@/lib/trpc'; +import { useState, useEffect } from 'react'; + +function MyComponent() { + const [result, setResult] = useState(null); + + useEffect(() => { + trpc.queryModel + .query({ prompt: "Hello" }) + .then(setResult) + .catch(console.error); + }, []); + + const handleCreate = async () => { + await trpc.createRecord.mutate({ name: "test" }); + }; + + return
<div>{/* component JSX */}</div>
; +} +``` + +## Decision Tree for Data Operations + +1. **Need to display data from SQL?** + - **Chart or Table?** → Use visualization components (`BarChart`, `LineChart`, `DataTable`, etc.) + - **Custom display (KPIs, cards, lists)?** → Use `useAnalyticsQuery` hook + - **Never** use tRPC for SQL SELECT statements + +2. **Need to call a Databricks API?** → Use tRPC + - Serving endpoints (model inference) + - MLflow operations + - Jobs API + - Workspace API + +3. **Need to modify data?** → Use tRPC mutations + - INSERT, UPDATE, DELETE operations + - Multi-step transactions + - Business logic with side effects + +4. **Need non-SQL custom logic?** → Use tRPC + - File processing + - External API calls + - Complex computations in TypeScript + +**Summary:** +- ✅ SQL queries → Visualization components or `useAnalyticsQuery` +- ✅ Databricks APIs → tRPC +- ✅ Data mutations → tRPC +- ❌ SQL queries → tRPC (NEVER do this) diff --git a/experimental/aitools/lib/agent_skills/embed.go b/experimental/aitools/lib/agent_skills/embed.go new file mode 100644 index 0000000000..0777084b30 --- /dev/null +++ b/experimental/aitools/lib/agent_skills/embed.go @@ -0,0 +1,8 @@ +package agent_skills + +import "embed" + +// SkillsFS embeds all installable agent skills. +// +//go:embed all:databricks-apps +var SkillsFS embed.FS From 39e8bb096e5929247b3bd190aeabb12dd61eb03b Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Tue, 13 Jan 2026 17:53:13 +0100 Subject: [PATCH 2/6] reuse appkit template docs for skill references --- experimental/aitools/cmd/skills.go | 35 ++++ .../databricks-apps/references/appkit-sdk.md | 86 -------- .../databricks-apps/references/frontend.md | 108 ---------- .../databricks-apps/references/sql-queries.md | 195 ------------------ .../databricks-apps/references/testing.md | 58 ------ .../databricks-apps/references/trpc.md | 95 --------- experimental/aitools/templates/appkit/docs.go | 8 + 7 files changed, 43 insertions(+), 542 deletions(-) delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md create mode 100644 experimental/aitools/templates/appkit/docs.go diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go index 9284a43ef3..92ebf358cc 100644 --- a/experimental/aitools/cmd/skills.go +++ b/experimental/aitools/cmd/skills.go @@ -8,6 +8,7 @@ import ( "path/filepath" "github.com/databricks/cli/experimental/aitools/lib/agent_skills" + appkitdocs "github.com/databricks/cli/experimental/aitools/templates/appkit" "github.com/databricks/cli/libs/cmdio" "github.com/fatih/color" "github.com/spf13/cobra" @@ -113,6 +114,7 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("failed to create directory: %w", err) } + // copy skill-specific files (SKILL.md, authentication.md, etc.) 
err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error { if err != nil { return err @@ -135,6 +137,39 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("failed to copy skill files: %w", err) } + // copy shared docs from appkit template + if err := copySharedDocs(destDir); err != nil { + return fmt.Errorf("failed to copy shared docs: %w", err) + } + cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir)) return nil } + +func copySharedDocs(destDir string) error { + refsDir := filepath.Join(destDir, "references") + if err := os.MkdirAll(refsDir, 0o755); err != nil { + return err + } + + // docs from appkit template to copy as skill references + sharedDocs := []string{ + "appkit-sdk.md", + "frontend.md", + "sql-queries.md", + "testing.md", + "trpc.md", + } + + for _, doc := range sharedDocs { + content, err := appkitdocs.DocsFS.ReadFile("template/{{.project_name}}/docs/" + doc) + if err != nil { + return fmt.Errorf("failed to read %s: %w", doc, err) + } + if err := os.WriteFile(filepath.Join(refsDir, doc), content, 0o644); err != nil { + return fmt.Errorf("failed to write %s: %w", doc, err) + } + } + + return nil +} diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md deleted file mode 100644 index 5ab00768e1..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/appkit-sdk.md +++ /dev/null @@ -1,86 +0,0 @@ -# Databricks App Kit SDK - -## TypeScript Import Rules - -This template uses strict TypeScript settings with `verbatimModuleSyntax: true`. **Always use `import type` for type-only imports**. - -Template enforces `noUnusedLocals` - remove unused imports immediately or build fails. - -```typescript -// ✅ CORRECT - use import type for types -import type { MyInterface, MyType } from '../../shared/types'; - -// ❌ WRONG - will fail compilation -import { MyInterface, MyType } from '../../shared/types'; -``` - -## Server Setup - -```typescript -import { createApp, server, analytics } from '@databricks/app-kit'; - -const app = await createApp({ - plugins: [ - server({ autoStart: false }), - analytics(), - ], -}); - -// Extend with custom tRPC endpoints if needed -app.server.extend((express: Application) => { - express.use('/trpc', [appRouterMiddleware()]); -}); - -await app.server.start(); -``` - -## useAnalyticsQuery Hook - -**ONLY use when displaying data in a custom way that isn't a chart or table.** - -Use cases: -- Custom HTML layouts (cards, lists, grids) -- Summary statistics and KPIs -- Conditional rendering based on data values -- Data that needs transformation before display - -```typescript -import { useAnalyticsQuery, Skeleton } from '@databricks/app-kit-ui/react'; - -interface QueryResult { column_name: string; value: number; } - -function CustomDisplay() { - const { data, loading, error } = useAnalyticsQuery('query_name', { - start_date: sql.date(Date.now()), - category: sql.string("tools") - }); - - if (loading) return ; - if (error) return
<div>Error: {error}</div>;
-
-  return (
-    <div>
-      {data?.map(row => (
-        <div key={row.column_name}>
-          <div>{row.column_name}</div>
-          <div>{row.value}</div>
-        </div>
-      ))}
-    </div>
- ); -} -``` - -**API:** - -```typescript -const { data, loading, error } = useAnalyticsQuery( - queryName: string, // SQL file name without .sql extension - params: Record // Query parameters -); -// Returns: { data: T | null, loading: boolean, error: string | null } -``` - -**NOT supported:** -- `enabled` - Query always executes on mount. Use conditional rendering: `{selectedId && }` -- `refetch` - Not available. Re-mount component to re-query. diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md deleted file mode 100644 index a270b46b9e..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/frontend.md +++ /dev/null @@ -1,108 +0,0 @@ -# Frontend Guidelines - -## Visualization Components - -Components from `@databricks/appkit-ui/react` handle data fetching, loading states, and error handling internally. - -Available: `AreaChart`, `BarChart`, `LineChart`, `PieChart`, `RadarChart`, `DataTable` - -**Basic Usage:** - -```typescript -import { BarChart, LineChart, DataTable, Card, CardContent, CardHeader, CardTitle } from '@databricks/appkit-ui/react'; -import { sql } from "@databricks/appkit-ui/js"; - -function MyDashboard() { - return ( -
-    <div className="grid grid-cols-2 gap-4">
-      <Card>
-        <CardHeader>
-          <CardTitle>Sales by Region</CardTitle>
-        </CardHeader>
-        <CardContent>
-          <BarChart queryKey="sales_by_region" />
-        </CardContent>
-      </Card>
-      <Card>
-        <CardHeader>
-          <CardTitle>Revenue Trend</CardTitle>
-        </CardHeader>
-        <CardContent>
-          <LineChart queryKey="revenue_trend" />
-        </CardContent>
-      </Card>
-    </div>
- ); -} -``` - -Components automatically fetch data, show loading states, display errors, and render with sensible defaults. - -**Custom Visualization (Recharts):** - -```typescript -import { BarChart } from '@databricks/appkit-ui/react'; -import { Bar, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts'; - - - - - - - - - - -``` - -Databricks brand colors: `['#40d1f5', '#4462c9', '#EB1600', '#0B2026', '#4A4A4A', '#353a4a']` - -**❌ Don't double-fetch:** - -```typescript -// WRONG - redundant fetch -const { data } = useAnalyticsQuery('sales_data', {}); -return ; - -// CORRECT - let component handle it -return ; -``` - -## Layout Structure - -```tsx -
-<div className="container mx-auto p-6 space-y-6">
-  <header>
-    <h1 className="text-2xl font-bold">Page Title</h1>
-  </header>
-  <div>{/* form inputs */}</div>
-  <div>{/* list items */}</div>
-</div>
-``` - -## Component Organization - -- Shared UI components: `@databricks/appkit-ui/react` -- Feature components: `client/src/components/FeatureName.tsx` -- Split components when logic exceeds ~100 lines or component is reused - -## Radix UI Constraints - -- `SelectItem` cannot have `value=""`. Use sentinel value like `"all"` for "show all" options. - -## Map Libraries (react-leaflet) - -For maps with React 19, use react-leaflet v5: - -```bash -npm install react-leaflet@^5.0.0 leaflet @types/leaflet -``` - -```typescript -import 'leaflet/dist/leaflet.css'; -``` - -## Best Practices - -- Use shadcn/radix components (Button, Input, Card, etc.) for consistent UI, import them from `@databricks/appkit-ui/react`. -- **Use skeleton loaders**: Always use `` components instead of plain "Loading..." text -- Define result types in `shared/types.ts` for reuse between frontend and backend -- Handle nullable fields: `value={field || ''}` for inputs -- Type callbacks explicitly: `onChange={(e: React.ChangeEvent) => ...}` -- Forms should have loading states: `disabled={isLoading}` -- Show empty states with helpful text when no data exists diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md deleted file mode 100644 index 2db77f0bfb..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/sql-queries.md +++ /dev/null @@ -1,195 +0,0 @@ -# SQL Query Files - -**IMPORTANT**: ALWAYS use SQL files in `config/queries/` for data retrieval. NEVER use tRPC for SQL queries. - -- Store ALL SQL queries in `config/queries/` directory -- Name files descriptively: `trip_statistics.sql`, `user_metrics.sql`, `sales_by_region.sql` -- Reference by filename (without extension) in `useAnalyticsQuery` or directly in a visualization component passing it as `queryKey` -- App Kit automatically executes queries against configured Databricks warehouse -- Benefits: Built-in caching, proper connection pooling, better performance - -## Query Schemas - -Define the shape of QUERY RESULTS (not input parameters) in `config/queries/schema.ts` using Zod schemas. - -- **These schemas validate the COLUMNS RETURNED by SQL queries** -- Input parameters are passed separately to `useAnalyticsQuery()` as the second argument -- Schema field names must match your SQL SELECT column names/aliases - -Example: - -```typescript -import { z } from 'zod'; - -export const querySchemas = { - mocked_sales: z.array( - z.object({ - max_month_num: z.number().min(1).max(12), - }) - ), - - hello_world: z.array( - z.object({ - value: z.string(), - }) - ), -}; -``` - -**IMPORTANT: Refreshing Type Definitions** - -After adding or modifying query schemas in `config/queries/schema.ts`: - -1. **DO NOT** manually edit `client/src/appKitTypes.d.ts` - this file is auto-generated -2. Run `npm run dev` to automatically regenerate the TypeScript type definitions -3. The dev server will scan your SQL files and schema definitions and update `appKitTypes.d.ts` accordingly - -## SQL Type Handling (Critical) - -**ALL numeric values from Databricks SQL are returned as STRINGS in JSON responses.** This includes results from `ROUND()`, `AVG()`, `SUM()`, `COUNT()`, etc. 
Always convert before using numeric methods: - -```typescript -// ❌ WRONG - fails at runtime -{row.total_amount.toFixed(2)} - -// ✅ CORRECT - convert to number first -{Number(row.total_amount).toFixed(2)} -``` - -**Helper Functions:** - -Use the helpers from `shared/types.ts` for consistent formatting: - -```typescript -import { toNumber, formatCurrency, formatPercent } from '../../shared/types'; - -// Convert to number -const amount = toNumber(row.amount); // "123.45" → 123.45 - -// Format as currency -const formatted = formatCurrency(row.amount); // "123.45" → "$123.45" - -// Format as percentage -const percent = formatPercent(row.rate); // "85.5" → "85.5%" -``` - -## Query Parameterization - -SQL queries can accept parameters to make them dynamic and reusable. - -**Key Points:** -- Parameters use colon prefix: `:parameter_name` -- Databricks infers types from values automatically -- For optional string parameters, use pattern: `(:param = '' OR column = :param)` -- **For optional date parameters, use sentinel dates** (`'1900-01-01'` and `'9999-12-31'`) instead of empty strings - -### SQL Parameter Syntax - -```sql --- config/queries/filtered_data.sql -SELECT * -FROM my_table -WHERE column_value >= :min_value - AND column_value <= :max_value - AND category = :category - AND (:optional_filter = '' OR status = :optional_filter) -``` - -### Frontend Parameter Passing - -```typescript -import { sql } from "@databricks/appkit-ui/js"; - -const { data } = useAnalyticsQuery('filtered_data', { - min_value: sql.number(minValue), - max_value: sql.number(maxValue), - category: sql.string(category), - optional_filter: sql.string(optionalFilter || ''), // empty string for optional params -}); -``` - -### Date Parameters - -Use `sql.date()` for date parameters with `YYYY-MM-DD` format strings. - -**Frontend - Using Date Parameters:** - -```typescript -import { sql } from '@databricks/appkit-ui/js'; -import { useState } from 'react'; - -function MyComponent() { - const [startDate, setStartDate] = useState('2016-02-01'); - const [endDate, setEndDate] = useState('2016-02-29'); - - const queryParams = { - start_date: sql.date(startDate), // Pass YYYY-MM-DD string to sql.date() - end_date: sql.date(endDate), - }; - - const { data } = useAnalyticsQuery('my_query', queryParams); - - // ... -} -``` - -**SQL - Date Filtering:** - -```sql --- Filter by date range using DATE() function -SELECT COUNT(*) as trip_count -FROM samples.nyctaxi.trips -WHERE DATE(tpep_pickup_datetime) >= :start_date - AND DATE(tpep_pickup_datetime) <= :end_date -``` - -**Date Helper Functions:** - -```typescript -// Helper to get dates relative to today -const daysAgo = (n: number) => { - const date = new Date(Date.now() - n * 86400000); - return sql.date(date) -}; - -const params = { - start_date: daysAgo(7), // 7 days ago - end_date: sql.date(daysAgo(0)), // Today -}; -``` - -### Optional Date Parameters - Use Sentinel Dates - -Databricks App Kit validates parameter types before query execution. **DO NOT use empty strings (`''`) for optional date parameters** as this causes validation errors. 
- -**✅ CORRECT - Use Sentinel Dates:** - -```typescript -// Frontend: Use sentinel dates for "no filter" instead of empty strings -const revenueParams = { - group_by: 'month', - start_date: sql.date('1900-01-01'), // Sentinel: effectively no lower bound - end_date: sql.date('9999-12-31'), // Sentinel: effectively no upper bound - country: sql.string(country || ''), - property_type: sql.string(propertyType || ''), -}; -``` - -```sql --- SQL: Simple comparison since sentinel dates are always valid -WHERE b.check_in >= CAST(:start_date AS DATE) - AND b.check_in <= CAST(:end_date AS DATE) -``` - -**Why Sentinel Dates Work:** -- `1900-01-01` is before any real data (effectively no lower bound filter) -- `9999-12-31` is after any real data (effectively no upper bound filter) -- Always valid DATE types, so no parameter validation errors -- All real dates fall within this range, so no filtering occurs - -**Parameter Types Summary:** -- ALWAYS use sql.* helper functions from the `@databricks/appkit-ui/js` package to define SQL parameters -- **Strings/Numbers**: Use directly in SQL with `:param_name` -- **Dates**: Use with `CAST(:param AS DATE)` in SQL -- **Optional Strings**: Use empty string default, check with `(:param = '' OR column = :param)` -- **Optional Dates**: Use sentinel dates (`sql.date('1900-01-01')` and `sql.date('9999-12-31')`) instead of empty strings diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md deleted file mode 100644 index b1a4fea219..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/testing.md +++ /dev/null @@ -1,58 +0,0 @@ -# Testing Guidelines - -## Unit Tests (Vitest) - -**CRITICAL**: Use vitest for all tests. Put tests next to the code (e.g. src/\*.test.ts) - -```typescript -import { describe, it, expect } from 'vitest'; - -describe('Feature Name', () => { - it('should do something', () => { - expect(true).toBe(true); - }); - - it('should handle async operations', async () => { - const result = await someAsyncFunction(); - expect(result).toBeDefined(); - }); -}); -``` - -**Best Practices:** -- Use `describe` blocks to group related tests -- Use `it` for individual test cases -- Use `expect` for assertions -- Tests run with `npm test` (runs `vitest run`) - -❌ **Do not write unit tests for:** -- SQL files under `config/queries/` - little value in testing static SQL -- Types associated with queries - these are just schema definitions - -## Smoke Test (Playwright) - -The template includes a smoke test at `tests/smoke.spec.ts` that verifies the app loads correctly. 
- -**What the smoke test does:** -- Opens the app -- Waits for data to load (SQL query results) -- Verifies key UI elements are visible -- Captures screenshots and console logs to `.smoke-test/` directory -- Always captures artifacts, even on test failure - -**When customizing the app**, update `tests/smoke.spec.ts` to match your UI: -- Change heading selector to match your app title (replace 'Minimal Databricks App') -- Update data assertions to match your query results (replace 'hello world' check) -- Keep the test simple - just verify app loads and displays data -- The default test expects specific template content; update these expectations after customization - -**Keep smoke tests simple:** -- Only verify that the app loads and displays initial data -- Wait for key elements to appear (page title, main content) -- Capture artifacts for debugging -- Run quickly (< 5 seconds) - -**For extended E2E tests:** -- Create separate test files in `tests/` directory (e.g., `tests/user-flow.spec.ts`) -- Use `npm run test:e2e` to run all Playwright tests -- Keep complex user flows, interactions, and edge cases out of the smoke test diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md deleted file mode 100644 index acfb68c1b6..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/references/trpc.md +++ /dev/null @@ -1,95 +0,0 @@ -# tRPC for Custom Endpoints - -**CRITICAL**: Do NOT use tRPC for SQL queries or data retrieval. Use `config/queries/` + `useAnalyticsQuery` instead. - -Use tRPC ONLY for: - -- **Mutations**: Creating, updating, or deleting data (INSERT, UPDATE, DELETE) -- **External APIs**: Calling Databricks APIs (serving endpoints, jobs, MLflow, etc.) -- **Complex business logic**: Multi-step operations that cannot be expressed in SQL -- **File operations**: File uploads, processing, transformations -- **Custom computations**: Operations requiring TypeScript/Node.js logic - -## Server-side Pattern - -```typescript -// server/trpc.ts -import { initTRPC } from '@trpc/server'; -import { getRequestContext } from '@databricks/appkit'; -import { z } from 'zod'; - -const t = initTRPC.create({ transformer: superjson }); -const publicProcedure = t.procedure; - -export const appRouter = t.router({ - // Example: Query a serving endpoint - queryModel: publicProcedure.input(z.object({ prompt: z.string() })).query(async ({ input: { prompt } }) => { - const { serviceDatabricksClient: client } = getRequestContext(); - const response = await client.servingEndpoints.query({ - name: 'your-endpoint-name', - messages: [{ role: 'user', content: prompt }], - }); - return response; - }), - - // Example: Mutation - createRecord: publicProcedure.input(z.object({ name: z.string() })).mutation(async ({ input }) => { - // Custom logic here - return { success: true, id: 123 }; - }), -}); -``` - -## Client-side Pattern - -```typescript -// client/src/components/MyComponent.tsx -import { trpc } from '@/lib/trpc'; -import { useState, useEffect } from 'react'; - -function MyComponent() { - const [result, setResult] = useState(null); - - useEffect(() => { - trpc.queryModel - .query({ prompt: "Hello" }) - .then(setResult) - .catch(console.error); - }, []); - - const handleCreate = async () => { - await trpc.createRecord.mutate({ name: "test" }); - }; - - return
<div>{/* component JSX */}</div>
; -} -``` - -## Decision Tree for Data Operations - -1. **Need to display data from SQL?** - - **Chart or Table?** → Use visualization components (`BarChart`, `LineChart`, `DataTable`, etc.) - - **Custom display (KPIs, cards, lists)?** → Use `useAnalyticsQuery` hook - - **Never** use tRPC for SQL SELECT statements - -2. **Need to call a Databricks API?** → Use tRPC - - Serving endpoints (model inference) - - MLflow operations - - Jobs API - - Workspace API - -3. **Need to modify data?** → Use tRPC mutations - - INSERT, UPDATE, DELETE operations - - Multi-step transactions - - Business logic with side effects - -4. **Need non-SQL custom logic?** → Use tRPC - - File processing - - External API calls - - Complex computations in TypeScript - -**Summary:** -- ✅ SQL queries → Visualization components or `useAnalyticsQuery` -- ✅ Databricks APIs → tRPC -- ✅ Data mutations → tRPC -- ❌ SQL queries → tRPC (NEVER do this) diff --git a/experimental/aitools/templates/appkit/docs.go b/experimental/aitools/templates/appkit/docs.go new file mode 100644 index 0000000000..1070628cc1 --- /dev/null +++ b/experimental/aitools/templates/appkit/docs.go @@ -0,0 +1,8 @@ +package appkit + +import "embed" + +// DocsFS embeds the appkit template documentation. +// +//go:embed template/{{.project_name}}/docs/*.md +var DocsFS embed.FS From ef1b64b77e443f135ab67d8fbc9cdfbcc6c3b7fb Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Thu, 22 Jan 2026 15:13:19 +0100 Subject: [PATCH 3/6] aitools: fetch skills from GitHub instead of embedding Skills now fetched from databricks/databricks-agent-skills repo. --- experimental/aitools/cmd/skills.go | 278 +++++++++++++----- .../lib/agent_skills/databricks-apps/SKILL.md | 94 ------ .../references/authentication.md | 52 ---- .../aitools/lib/agent_skills/embed.go | 8 - experimental/aitools/templates/appkit/docs.go | 8 - 5 files changed, 210 insertions(+), 230 deletions(-) delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md delete mode 100644 experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md delete mode 100644 experimental/aitools/lib/agent_skills/embed.go delete mode 100644 experimental/aitools/templates/appkit/docs.go diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go index 92ebf358cc..7709bf0a97 100644 --- a/experimental/aitools/cmd/skills.go +++ b/experimental/aitools/cmd/skills.go @@ -2,18 +2,45 @@ package mcp import ( "context" + "encoding/json" "fmt" - "io/fs" + "io" + "net/http" "os" "path/filepath" + "strings" + "time" - "github.com/databricks/cli/experimental/aitools/lib/agent_skills" - appkitdocs "github.com/databricks/cli/experimental/aitools/templates/appkit" "github.com/databricks/cli/libs/cmdio" "github.com/fatih/color" "github.com/spf13/cobra" ) +const ( + skillsRepoOwner = "databricks" + skillsRepoName = "databricks-agent-skills" + skillsRepoPath = "skills" + defaultSkillsRepoBranch = "main" +) + +func getSkillsBranch() string { + if branch := os.Getenv("DATABRICKS_SKILLS_BRANCH"); branch != "" { + return branch + } + return defaultSkillsRepoBranch +} + +type Manifest struct { + Version string `json:"version"` + UpdatedAt string `json:"updated_at"` + Skills map[string]SkillMeta `json:"skills"` +} + +type SkillMeta struct { + Version string `json:"version"` + UpdatedAt string `json:"updated_at"` +} + func newSkillsCmd() *cobra.Command { cmd := &cobra.Command{ Use: "skills", @@ -39,53 +66,194 @@ func newSkillsListCmd() *cobra.Command { func newSkillsInstallCmd() 
*cobra.Command { return &cobra.Command{ - Use: "install", - Short: "Install all Databricks skills for Claude Code", - Long: `Install all Databricks skills to ~/.claude/skills/ for use with Claude Code.`, + Use: "install [skill-name]", + Short: "Install Databricks skills for Claude Code", + Long: `Install Databricks skills to ~/.claude/skills/ for use with Claude Code. If no skill name is provided, installs all available skills.`, RunE: func(cmd *cobra.Command, args []string) error { + if len(args) > 0 { + return installSkill(cmd.Context(), args[0]) + } return installAllSkills(cmd.Context()) }, } } -func getSkillNames() ([]string, error) { - entries, err := fs.ReadDir(agent_skills.SkillsFS, ".") +func fetchManifest(ctx context.Context) (*Manifest, error) { + url := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s/%s/manifest.json", + skillsRepoOwner, skillsRepoName, getSkillsBranch(), skillsRepoPath) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + client := &http.Client{Timeout: 30 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to fetch manifest: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to fetch manifest: HTTP %d", resp.StatusCode) + } + + var manifest Manifest + if err := json.NewDecoder(resp.Body).Decode(&manifest); err != nil { + return nil, fmt.Errorf("failed to parse manifest: %w", err) + } + + return &manifest, nil +} + +func fetchSkillFile(ctx context.Context, skillName, filePath string) ([]byte, error) { + url := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s/%s/%s/%s", + skillsRepoOwner, skillsRepoName, getSkillsBranch(), skillsRepoPath, skillName, filePath) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + client := &http.Client{Timeout: 30 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to fetch %s: %w", filePath, err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to fetch %s: HTTP %d", filePath, resp.StatusCode) + } + + return io.ReadAll(resp.Body) +} + +func fetchSkillFileList(ctx context.Context, skillName string) ([]string, error) { + // use GitHub API to list files in skill directory + skillPath := skillsRepoPath + "/" + skillName + url := fmt.Sprintf("https://api.github.com/repos/%s/%s/contents/%s?ref=%s", + skillsRepoOwner, skillsRepoName, skillPath, getSkillsBranch()) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, err + } + req.Header.Set("Accept", "application/vnd.github.v3+json") + + client := &http.Client{Timeout: 30 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to list skill files: HTTP %d", resp.StatusCode) + } + + var items []struct { + Path string `json:"path"` + Type string `json:"type"` + } + if err := json.NewDecoder(resp.Body).Decode(&items); err != nil { + return nil, err + } + + var files []string + for _, item := range items { + switch item.Type { + case "file": + // strip skills/skill-name prefix from path + relPath := strings.TrimPrefix(item.Path, skillPath+"/") + files = append(files, relPath) + 
case "dir": + // recursively list subdirectory + subFiles, err := fetchSubdirFiles(ctx, item.Path) + if err != nil { + return nil, err + } + for _, sf := range subFiles { + relPath := strings.TrimPrefix(sf, skillPath+"/") + files = append(files, relPath) + } + } + } + + return files, nil +} + +func fetchSubdirFiles(ctx context.Context, dirPath string) ([]string, error) { + url := fmt.Sprintf("https://api.github.com/repos/%s/%s/contents/%s?ref=%s", + skillsRepoOwner, skillsRepoName, dirPath, getSkillsBranch()) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { - return nil, fmt.Errorf("failed to read skills: %w", err) + return nil, err + } + req.Header.Set("Accept", "application/vnd.github.v3+json") + + client := &http.Client{Timeout: 30 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("failed to list directory %s: HTTP %d", dirPath, resp.StatusCode) + } + + var items []struct { + Path string `json:"path"` + Type string `json:"type"` + } + if err := json.NewDecoder(resp.Body).Decode(&items); err != nil { + return nil, err } - var names []string - for _, entry := range entries { - if entry.IsDir() { - names = append(names, entry.Name()) + var files []string + for _, item := range items { + switch item.Type { + case "file": + files = append(files, item.Path) + case "dir": + subFiles, err := fetchSubdirFiles(ctx, item.Path) + if err != nil { + return nil, err + } + files = append(files, subFiles...) } } - return names, nil + + return files, nil } func listSkills(ctx context.Context) error { - names, err := getSkillNames() + manifest, err := fetchManifest(ctx) if err != nil { return err } cmdio.LogString(ctx, "Available skills:") cmdio.LogString(ctx, "") - for _, name := range names { - cmdio.LogString(ctx, " "+name) + + for name, meta := range manifest.Skills { + cmdio.LogString(ctx, fmt.Sprintf(" %s (v%s)", name, meta.Version)) } + cmdio.LogString(ctx, "") cmdio.LogString(ctx, "Install all with: databricks experimental aitools skills install") + cmdio.LogString(ctx, "Install one with: databricks experimental aitools skills install ") return nil } func installAllSkills(ctx context.Context) error { - names, err := getSkillNames() + manifest, err := fetchManifest(ctx) if err != nil { return err } - for _, name := range names { + for name := range manifest.Skills { if err := installSkill(ctx, name); err != nil { return err } @@ -94,15 +262,21 @@ func installAllSkills(ctx context.Context) error { } func installSkill(ctx context.Context, skillName string) error { - skillFS, err := fs.Sub(agent_skills.SkillsFS, skillName) + manifest, err := fetchManifest(ctx) if err != nil { - return fmt.Errorf("skill %q not found", skillName) + return err } - if _, err := fs.Stat(skillFS, "SKILL.md"); err != nil { + if _, ok := manifest.Skills[skillName]; !ok { return fmt.Errorf("skill %q not found", skillName) } + // get list of files in skill + files, err := fetchSkillFileList(ctx, skillName) + if err != nil { + return fmt.Errorf("failed to list skill files: %w", err) + } + homeDir, err := os.UserHomeDir() if err != nil { return fmt.Errorf("failed to get home directory: %w", err) @@ -110,66 +284,34 @@ func installSkill(ctx context.Context, skillName string) error { destDir := filepath.Join(homeDir, ".claude", "skills", skillName) + // remove existing skill directory for clean install + if err := os.RemoveAll(destDir); err != nil { + return 
fmt.Errorf("failed to remove existing skill: %w", err) + } + if err := os.MkdirAll(destDir, 0o755); err != nil { return fmt.Errorf("failed to create directory: %w", err) } - // copy skill-specific files (SKILL.md, authentication.md, etc.) - err = fs.WalkDir(skillFS, ".", func(path string, d fs.DirEntry, err error) error { + // download all files + for _, file := range files { + content, err := fetchSkillFile(ctx, skillName, file) if err != nil { return err } - destPath := filepath.Join(destDir, path) + destPath := filepath.Join(destDir, file) - if d.IsDir() { - return os.MkdirAll(destPath, 0o755) + // create parent directories if needed + if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil { + return fmt.Errorf("failed to create directory: %w", err) } - content, err := fs.ReadFile(skillFS, path) - if err != nil { - return fmt.Errorf("failed to read %s: %w", path, err) + if err := os.WriteFile(destPath, content, 0o644); err != nil { + return fmt.Errorf("failed to write %s: %w", file, err) } - - return os.WriteFile(destPath, content, 0o644) - }) - if err != nil { - return fmt.Errorf("failed to copy skill files: %w", err) - } - - // copy shared docs from appkit template - if err := copySharedDocs(destDir); err != nil { - return fmt.Errorf("failed to copy shared docs: %w", err) } cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir)) return nil } - -func copySharedDocs(destDir string) error { - refsDir := filepath.Join(destDir, "references") - if err := os.MkdirAll(refsDir, 0o755); err != nil { - return err - } - - // docs from appkit template to copy as skill references - sharedDocs := []string{ - "appkit-sdk.md", - "frontend.md", - "sql-queries.md", - "testing.md", - "trpc.md", - } - - for _, doc := range sharedDocs { - content, err := appkitdocs.DocsFS.ReadFile("template/{{.project_name}}/docs/" + doc) - if err != nil { - return fmt.Errorf("failed to read %s: %w", doc, err) - } - if err := os.WriteFile(filepath.Join(refsDir, doc), content, 0o644); err != nil { - return fmt.Errorf("failed to write %s: %w", doc, err) - } - } - - return nil -} diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md b/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md deleted file mode 100644 index e82a6fcf0f..0000000000 --- a/experimental/aitools/lib/agent_skills/databricks-apps/SKILL.md +++ /dev/null @@ -1,94 +0,0 @@ ---- -name: databricks-apps -description: Build full-stack TypeScript apps on Databricks. Use when asked to create dashboards, data apps, analytics tools, or visualizations that query Databricks SQL. Provides project scaffolding, SQL data access patterns, and deployment commands. Invoke BEFORE starting implementation. -metadata: - version: "0.1.0" - min_cli_version: "0.250.0" ---- - -# Databricks Apps Development - -Build TypeScript apps that query Databricks SQL warehouses and deploy to Databricks Apps. - -## Workflow - -1. **Verify auth**: `databricks auth profiles` -2. **Find warehouse**: `databricks sql warehouses list` -3. **Explore data**: `databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE` -4. **Scaffold project**: `databricks experimental aitools tools init-template --name my-app --description "..."` -5. **Develop**: `cd my-app && npm install && npm run dev` -6. **Validate**: `databricks experimental aitools tools validate ./` -7. 
**Deploy**: `databricks experimental aitools tools deploy` (requires user permission)
-
-## Data Exploration
-
-```bash
-# list catalogs/schemas/tables
-databricks catalogs list
-databricks schemas list
-databricks tables list
-
-# discover table schema (columns, types, sample data)
-databricks experimental aitools tools discover-schema CATALOG.SCHEMA.TABLE
-
-# test queries
-databricks experimental aitools tools query "SELECT * FROM catalog.schema.table LIMIT 10"
-```
-
-Note: Use separate arguments for `catalogs/schemas/tables` commands. Dot notation only works in `discover-schema` and `query`.
-
-## Project Structure
-
-After scaffolding:
-- `server/` - Node.js backend with App Kit and tRPC
-- `client/` - React frontend with App Kit hooks
-- `config/queries/` - SQL query files
-- `shared/` - Shared TypeScript types
-
-## Adding Visualizations
-
-**Step 1**: Create SQL file in `config/queries/my_data.sql`
-```sql
-SELECT category, COUNT(*) as count FROM my_table GROUP BY category
-```
-
-**Step 2**: Define schema in `config/queries/schema.ts`
-```typescript
-export const querySchemas = {
-  my_data: z.array(z.object({ category: z.string(), count: z.number() })),
-};
-```
-
-**Step 3**: Use visualization component
-```typescript
-import { BarChart } from '@databricks/appkit-ui/react';
-
-<BarChart ... />
-```
-
-Run `npm run dev` to regenerate types after schema changes.
-
-## Key References
-
-Load these when implementing specific features:
-- [SQL Queries](references/sql-queries.md) - query files, schemas, parameterization, sql.* helpers
-- [AppKit SDK](references/appkit-sdk.md) - imports, server setup, useAnalyticsQuery hook
-- [Frontend](references/frontend.md) - visualization components, styling, Radix constraints
-- [tRPC](references/trpc.md) - custom endpoints for mutations, Databricks APIs
-- [Testing](references/testing.md) - vitest unit tests, Playwright smoke tests
-- [Authentication](references/authentication.md) - profiles, OAuth, troubleshooting
-
-## Critical Rules
-
-1. **SQL for data retrieval**: Always use `config/queries/` + visualization components. Never use tRPC for SELECT queries.
-2. **Numeric types**: All SQL numbers return as strings in JSON. Always convert: `Number(row.amount).toFixed(2)`
-3. **Type imports**: Use `import type { ... }` for type-only imports (verbatimModuleSyntax is enabled).
-4. **App name**: Must be ≤26 characters (dev- prefix adds 4 chars, max 30 total).
-5. **Validate before deploy**: Always run `databricks experimental aitools tools validate ./` first.
-
-## Decision Tree
-
-- **Display data from SQL?**
-  - Chart/Table → Use `BarChart`, `LineChart`, `DataTable` components
-  - Custom layout (KPIs, cards) → Use `useAnalyticsQuery` hook
-- **Call Databricks API?** → Use tRPC (serving endpoints, MLflow, Jobs API)
-- **Modify data?** → Use tRPC mutations
diff --git a/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md b/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md
deleted file mode 100644
index a990b7bf9f..0000000000
--- a/experimental/aitools/lib/agent_skills/databricks-apps/references/authentication.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Authentication
-
-## Check Status
-
-```bash
-databricks auth profiles
-```
-
-## Configure Profile
-
-```bash
-databricks configure --profile <profile-name>
-```
-
-## OAuth Login
-
-```bash
-databricks auth login --profile <profile-name> --host <workspace-url>
-```
-
-Browser-based OAuth. Recommended for development.
-
-## Profile Switching
-
-```bash
-# single command
-DATABRICKS_CONFIG_PROFILE=<profile-name> databricks <command>
-
-# or flag
-databricks <command> --profile <profile-name>
-```
-
-## Environment Variables
-
-| Variable | Purpose |
-|----------|---------|
-| `DATABRICKS_HOST` | Workspace URL |
-| `DATABRICKS_CONFIG_PROFILE` | Profile name |
-| `DATABRICKS_WAREHOUSE_ID` | Default warehouse |
-
-## Troubleshooting
-
-| Issue | Solution |
-|-------|----------|
-| No profiles | `databricks configure --profile <profile-name>` |
-| Token expired | `databricks auth login --profile <profile-name> --host <workspace-url>` |
-| Wrong workspace | Check `DATABRICKS_CONFIG_PROFILE` or use `--profile` |
-| Silent auth fail | `databricks auth profiles` to check status |
-
-## New Account
-
-Free account: https://docs.databricks.com/getting-started/free-edition
diff --git a/experimental/aitools/lib/agent_skills/embed.go b/experimental/aitools/lib/agent_skills/embed.go
deleted file mode 100644
index 0777084b30..0000000000
--- a/experimental/aitools/lib/agent_skills/embed.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package agent_skills
-
-import "embed"
-
-// SkillsFS embeds all installable agent skills.
-//
-//go:embed all:databricks-apps
-var SkillsFS embed.FS
diff --git a/experimental/aitools/templates/appkit/docs.go b/experimental/aitools/templates/appkit/docs.go
deleted file mode 100644
index 1070628cc1..0000000000
--- a/experimental/aitools/templates/appkit/docs.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package appkit
-
-import "embed"
-
-// DocsFS embeds the appkit template documentation.
-//
-//go:embed template/{{.project_name}}/docs/*.md
-var DocsFS embed.FS

From 17f3e0a8d647c8870760ef6cbfc2bc95de6ccc47 Mon Sep 17 00:00:00 2001
From: Arseny Kravchenko
Date: Fri, 23 Jan 2026 14:43:01 +0100
Subject: [PATCH 4/6] aitools: multi-agent skills installation

- Support 8 agents: Claude Code, Cursor, Windsurf, Cline, Roo Code, Codex CLI, Amp, OpenCode
- Auto-detect installed agents and print which were found
- Use symlinks when multiple agents are detected (canonical location: ~/.databricks/agent-skills/)
- Fall back to copy if symlink fails (Windows without admin)
- Support GitHub token from env vars or gh CLI for private repo access

Co-Authored-By: Claude Opus 4.5
---
 experimental/aitools/cmd/skills.go | 330 ++++++++++++++++++++++++++++-
 1 file changed, 319 insertions(+), 11 deletions(-)

diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go
index 7709bf0a97..5afe94aebb 100644
--- a/experimental/aitools/cmd/skills.go
+++ b/experimental/aitools/cmd/skills.go
@@ -7,7 +7,9 @@ import (
 	"io"
 	"net/http"
 	"os"
+	"os/exec"
 	"path/filepath"
+	"runtime"
 	"strings"
 	"time"
 
@@ -21,6 +23,7 @@ const (
 	skillsRepoName          = "databricks-agent-skills"
 	skillsRepoPath          = "skills"
 	defaultSkillsRepoBranch = "main"
+	canonicalSkillsDir      = ".databricks/agent-skills" // canonical location for symlink source
 )
 
 func getSkillsBranch() string {
@@ -30,6 +33,217 @@ func getSkillsBranch() string {
 	return defaultSkillsRepoBranch
 }
 
+// getGitHubToken returns the GitHub token from the environment or the gh CLI.
+// TODO: once databricks-agent-skills repo is public, replace GitHub API calls
+// with raw.githubusercontent.com URLs and remove authentication logic.
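+// Token precedence: GITHUB_TOKEN, then GH_TOKEN, then the output of "gh auth token".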
+func getGitHubToken() string { + // check environment variables first + if token := os.Getenv("GITHUB_TOKEN"); token != "" { + return token + } + if token := os.Getenv("GH_TOKEN"); token != "" { + return token + } + // try gh CLI + out, err := exec.Command("gh", "auth", "token").Output() + if err == nil { + return strings.TrimSpace(string(out)) + } + return "" +} + +// addGitHubAuth adds authentication header if token is available. +func addGitHubAuth(req *http.Request) { + if token := getGitHubToken(); token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } +} + +// AgentConfig defines how to detect and install skills for an agent. +type AgentConfig struct { + Name string + DisplayName string + GlobalSkillDir func() (string, error) // returns global skills directory path + Detect func() bool // returns true if agent is installed +} + +// getHomeDir returns home directory, handling Windows USERPROFILE. +func getHomeDir() (string, error) { + if runtime.GOOS == "windows" { + if userProfile := os.Getenv("USERPROFILE"); userProfile != "" { + return userProfile, nil + } + } + return os.UserHomeDir() +} + +// supportedAgents defines all agents we can install skills to. +var supportedAgents = []AgentConfig{ + { + Name: "claude-code", + DisplayName: "Claude Code", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".claude", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".claude")) + return err == nil + }, + }, + { + Name: "cursor", + DisplayName: "Cursor", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".cursor", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".cursor")) + return err == nil + }, + }, + { + Name: "windsurf", + DisplayName: "Windsurf", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".codeium", "windsurf", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".codeium", "windsurf")) + return err == nil + }, + }, + { + Name: "cline", + DisplayName: "Cline", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".cline", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".cline")) + return err == nil + }, + }, + { + Name: "roo-code", + DisplayName: "Roo Code", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".roo-code", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".roo-code")) + return err == nil + }, + }, + { + Name: "codex", + DisplayName: "Codex CLI", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".codex", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = 
os.Stat(filepath.Join(home, ".codex")) + return err == nil + }, + }, + { + Name: "amp", + DisplayName: "Amp", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".amp", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".amp")) + return err == nil + }, + }, + { + Name: "opencode", + DisplayName: "OpenCode", + GlobalSkillDir: func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + return filepath.Join(home, ".config", "opencode", "skills"), nil + }, + Detect: func() bool { + home, err := getHomeDir() + if err != nil { + return false + } + _, err = os.Stat(filepath.Join(home, ".config", "opencode")) + return err == nil + }, + }, +} + +// detectInstalledAgents returns list of agents that are installed on the system. +func detectInstalledAgents() []AgentConfig { + var installed []AgentConfig + for _, agent := range supportedAgents { + if agent.Detect() { + installed = append(installed, agent) + } + } + return installed +} + type Manifest struct { Version string `json:"version"` UpdatedAt string `json:"updated_at"` @@ -44,8 +258,8 @@ type SkillMeta struct { func newSkillsCmd() *cobra.Command { cmd := &cobra.Command{ Use: "skills", - Short: "Manage Databricks skills for Claude Code", - Long: `Manage Databricks skills that can be installed to ~/.claude/skills/ for use with Claude Code.`, + Short: "Manage Databricks skills for coding agents", + Long: `Manage Databricks skills that extend coding agents with Databricks-specific capabilities.`, } cmd.AddCommand(newSkillsListCmd()) @@ -67,8 +281,14 @@ func newSkillsListCmd() *cobra.Command { func newSkillsInstallCmd() *cobra.Command { return &cobra.Command{ Use: "install [skill-name]", - Short: "Install Databricks skills for Claude Code", - Long: `Install Databricks skills to ~/.claude/skills/ for use with Claude Code. If no skill name is provided, installs all available skills.`, + Short: "Install Databricks skills for detected coding agents", + Long: `Install Databricks skills to all detected coding agents. + +Skills are installed globally to each agent's skills directory. +When multiple agents are detected, skills are stored in a canonical location +and symlinked to each agent to avoid duplication. 
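+If a symlink cannot be created, the skill is copied instead.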
+ +Supported agents: Claude Code, Cursor, Windsurf, Cline, Roo Code, Codex CLI, Amp, OpenCode`, RunE: func(cmd *cobra.Command, args []string) error { if len(args) > 0 { return installSkill(cmd.Context(), args[0]) @@ -79,12 +299,16 @@ func newSkillsInstallCmd() *cobra.Command { } func fetchManifest(ctx context.Context) (*Manifest, error) { - url := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s/%s/manifest.json", - skillsRepoOwner, skillsRepoName, getSkillsBranch(), skillsRepoPath) + // use GitHub API for private repo support + // manifest.json is at repo root, skills are in skillsRepoPath subdirectory + url := fmt.Sprintf("https://api.github.com/repos/%s/%s/contents/manifest.json?ref=%s", + skillsRepoOwner, skillsRepoName, getSkillsBranch()) req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { return nil, fmt.Errorf("failed to create request: %w", err) } + req.Header.Set("Accept", "application/vnd.github.raw+json") + addGitHubAuth(req) client := &http.Client{Timeout: 30 * time.Second} resp, err := client.Do(req) @@ -106,13 +330,16 @@ func fetchManifest(ctx context.Context) (*Manifest, error) { } func fetchSkillFile(ctx context.Context, skillName, filePath string) ([]byte, error) { - url := fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s/%s/%s/%s", - skillsRepoOwner, skillsRepoName, getSkillsBranch(), skillsRepoPath, skillName, filePath) + // use GitHub API for private repo support + url := fmt.Sprintf("https://api.github.com/repos/%s/%s/contents/%s/%s/%s?ref=%s", + skillsRepoOwner, skillsRepoName, skillsRepoPath, skillName, filePath, getSkillsBranch()) req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { return nil, fmt.Errorf("failed to create request: %w", err) } + req.Header.Set("Accept", "application/vnd.github.raw+json") + addGitHubAuth(req) client := &http.Client{Timeout: 30 * time.Second} resp, err := client.Do(req) @@ -139,6 +366,7 @@ func fetchSkillFileList(ctx context.Context, skillName string) ([]string, error) return nil, err } req.Header.Set("Accept", "application/vnd.github.v3+json") + addGitHubAuth(req) client := &http.Client{Timeout: 30 * time.Second} resp, err := client.Do(req) @@ -191,6 +419,7 @@ func fetchSubdirFiles(ctx context.Context, dirPath string) ([]string, error) { return nil, err } req.Header.Set("Accept", "application/vnd.github.v3+json") + addGitHubAuth(req) client := &http.Client{Timeout: 30 * time.Second} resp, err := client.Do(req) @@ -271,19 +500,80 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("skill %q not found", skillName) } + // detect installed agents + agents := detectInstalledAgents() + if len(agents) == 0 { + cmdio.LogString(ctx, color.YellowString("No supported coding agents detected.")) + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "Supported agents: Claude Code, Cursor, Windsurf, Cline, Roo Code, Codex CLI, Amp, OpenCode") + cmdio.LogString(ctx, "Please install at least one coding agent first.") + return nil + } + + // print detected agents + cmdio.LogString(ctx, "Detected coding agents:") + for _, agent := range agents { + cmdio.LogString(ctx, " - "+agent.DisplayName) + } + cmdio.LogString(ctx, "") + // get list of files in skill files, err := fetchSkillFileList(ctx, skillName) if err != nil { return fmt.Errorf("failed to list skill files: %w", err) } - homeDir, err := os.UserHomeDir() + homeDir, err := getHomeDir() if err != nil { return fmt.Errorf("failed to get home directory: %w", err) } - destDir := 
filepath.Join(homeDir, ".claude", "skills", skillName)
+	// determine installation strategy
+	useSymlinks := len(agents) > 1
+	var canonicalDir string
+
+	if useSymlinks {
+		// install to canonical location and symlink to each agent
+		canonicalDir = filepath.Join(homeDir, canonicalSkillsDir, skillName)
+		if err := installSkillToDir(ctx, skillName, canonicalDir, files); err != nil {
+			return err
+		}
+	}
+
+	// install/symlink to each agent
+	for _, agent := range agents {
+		agentSkillDir, err := agent.GlobalSkillDir()
+		if err != nil {
+			cmdio.LogString(ctx, color.YellowString("⊘ Skipped %s: %v", agent.DisplayName, err))
+			continue
+		}
+
+		destDir := filepath.Join(agentSkillDir, skillName)
+
+		if useSymlinks {
+			if err := createSymlink(canonicalDir, destDir); err != nil {
+				// fall back to copy on symlink failure (e.g., Windows without admin)
+				cmdio.LogString(ctx, color.YellowString("  Symlink failed for %s, copying instead...", agent.DisplayName))
+				if err := installSkillToDir(ctx, skillName, destDir, files); err != nil {
+					cmdio.LogString(ctx, color.YellowString("⊘ Failed to install for %s: %v", agent.DisplayName, err))
+					continue
+				}
+				cmdio.LogString(ctx, color.GreenString("✓ Installed %q for %s (copied)", skillName, agent.DisplayName))
+				continue
+			}
+			cmdio.LogString(ctx, color.GreenString("✓ Installed %q for %s (symlinked)", skillName, agent.DisplayName))
+		} else {
+			// single agent - install directly
+			if err := installSkillToDir(ctx, skillName, destDir, files); err != nil {
+				cmdio.LogString(ctx, color.YellowString("⊘ Failed to install for %s: %v", agent.DisplayName, err))
+				continue
+			}
+			cmdio.LogString(ctx, color.GreenString("✓ Installed %q for %s", skillName, agent.DisplayName))
+		}
+	}
+
+	return nil
+}
+
+func installSkillToDir(ctx context.Context, skillName, destDir string, files []string) error {
 	// remove existing skill directory for clean install
 	if err := os.RemoveAll(destDir); err != nil {
 		return fmt.Errorf("failed to remove existing skill: %w", err)
@@ -312,6 +602,24 @@ func installSkill(ctx context.Context, skillName string) error {
 		}
 	}
 
-	cmdio.LogString(ctx, color.GreenString("✓ Installed %q to %s", skillName, destDir))
+	return nil
+}
+
+func createSymlink(source, dest string) error {
+	// ensure parent directory exists
+	if err := os.MkdirAll(filepath.Dir(dest), 0o755); err != nil {
+		return fmt.Errorf("failed to create parent directory: %w", err)
+	}
+
+	// remove existing symlink or directory
+	if err := os.RemoveAll(dest); err != nil {
+		return fmt.Errorf("failed to remove existing path: %w", err)
+	}
+
+	// create symlink
+	if err := os.Symlink(source, dest); err != nil {
+		return fmt.Errorf("failed to create symlink: %w", err)
+	}
+
+	return nil
 }

From a6dcea69388de1e970e823af8e53e1f09b9efb3f Mon Sep 17 00:00:00 2001
From: Arseny Kravchenko
Date: Fri, 23 Jan 2026 14:58:48 +0100
Subject: [PATCH 5/6] aitools: unify agent detection in shared registry

- Create agents.Agent struct with detection, skills dir, and optional MCP install
- Supported agents: Claude Code, Cursor, Codex CLI, OpenCode, GitHub Copilot, Antigravity
- Update skills.go and install.go to use shared registry
- Remove duplicated detection logic from claude.go and cursor.go

Co-Authored-By: Claude Opus 4.5
---
 experimental/aitools/cmd/install.go       |  72 ++++----
 experimental/aitools/cmd/skills.go        | 207 ++--------------------
 experimental/aitools/lib/agents/agents.go | 123 +++++++++++++
 experimental/aitools/lib/agents/claude.go |  11 +-
 experimental/aitools/lib/agents/cursor.go |  40 +----
 5 files changed, 175 insertions(+), 278 deletions(-)
 create mode 100644 experimental/aitools/lib/agents/agents.go

diff --git
a/experimental/aitools/cmd/install.go b/experimental/aitools/cmd/install.go index 90b98eedc0..159a80b81b 100644 --- a/experimental/aitools/cmd/install.go +++ b/experimental/aitools/cmd/install.go @@ -30,25 +30,24 @@ func runInstall(ctx context.Context) error { // Check for non-interactive mode with agent detection // If running in an AI agent, install automatically without prompts if !cmdio.IsPromptSupported(ctx) { + var targetAgent *agents.Agent switch agent.Product(ctx) { case agent.ClaudeCode: - if err := agents.InstallClaude(); err != nil { - return err - } - cmdio.LogString(ctx, color.GreenString("✓ Installed Databricks MCP server for Claude Code")) - cmdio.LogString(ctx, color.YellowString("⚠️ Please restart Claude Code for changes to take effect")) - return nil + targetAgent = agents.GetByName("claude-code") case agent.Cursor: - if err := agents.InstallCursor(); err != nil { + targetAgent = agents.GetByName("cursor") + } + + if targetAgent != nil && targetAgent.InstallMCP != nil { + if err := targetAgent.InstallMCP(); err != nil { return err } - cmdio.LogString(ctx, color.GreenString("✓ Installed Databricks MCP server for Cursor")) - cmdio.LogString(ctx, color.YellowString("⚠️ Please restart Cursor for changes to take effect")) + cmdio.LogString(ctx, color.GreenString("✓ Installed Databricks MCP server for %s", targetAgent.DisplayName)) + cmdio.LogString(ctx, color.YellowString("⚠️ Please restart %s for changes to take effect", targetAgent.DisplayName)) return nil - default: - // Unknown agent in non-interactive mode - show manual instructions - return agents.ShowCustomInstructions(ctx) } + // Unknown agent in non-interactive mode - show manual instructions + return agents.ShowCustomInstructions(ctx) } cmdio.LogString(ctx, "") @@ -69,39 +68,32 @@ func runInstall(ctx context.Context) error { anySuccess := false - ans, err := cmdio.AskSelect(ctx, "Install for Claude Code?", []string{"yes", "no"}) - if err != nil { - return err - } - if ans == "yes" { - fmt.Fprint(os.Stderr, "Installing MCP server for Claude Code...") - if err := agents.InstallClaude(); err != nil { - fmt.Fprint(os.Stderr, "\r"+color.YellowString("⊘ Skipped Claude Code: "+err.Error())+"\n") - } else { - fmt.Fprint(os.Stderr, "\r"+color.GreenString("✓ Installed for Claude Code")+" \n") - anySuccess = true + // Install for agents that have MCP support + for i := range agents.Registry { + a := &agents.Registry[i] + if a.InstallMCP == nil { + continue } - cmdio.LogString(ctx, "") - } - ans, err = cmdio.AskSelect(ctx, "Install for Cursor?", []string{"yes", "no"}) - if err != nil { - return err - } - if ans == "yes" { - fmt.Fprint(os.Stderr, "Installing MCP server for Cursor...") - if err := agents.InstallCursor(); err != nil { - fmt.Fprint(os.Stderr, "\r"+color.YellowString("⊘ Skipped Cursor: "+err.Error())+"\n") - } else { - // Brief delay so users see the "Installing..." message before it's replaced - time.Sleep(1 * time.Second) - fmt.Fprint(os.Stderr, "\r"+color.GreenString("✓ Installed for Cursor")+" \n") - anySuccess = true + ans, err := cmdio.AskSelect(ctx, fmt.Sprintf("Install for %s?", a.DisplayName), []string{"yes", "no"}) + if err != nil { + return err + } + if ans == "yes" { + fmt.Fprintf(os.Stderr, "Installing MCP server for %s...", a.DisplayName) + if err := a.InstallMCP(); err != nil { + fmt.Fprint(os.Stderr, "\r"+color.YellowString("⊘ Skipped %s: %s", a.DisplayName, err.Error())+"\n") + } else { + // Brief delay so users see the "Installing..." 
message before it's replaced + time.Sleep(500 * time.Millisecond) + fmt.Fprint(os.Stderr, "\r"+color.GreenString("✓ Installed for %s", a.DisplayName)+" \n") + anySuccess = true + } + cmdio.LogString(ctx, "") } - cmdio.LogString(ctx, "") } - ans, err = cmdio.AskSelect(ctx, "Show manual installation instructions for other agents?", []string{"yes", "no"}) + ans, err := cmdio.AskSelect(ctx, "Show manual installation instructions for other agents?", []string{"yes", "no"}) if err != nil { return err } diff --git a/experimental/aitools/cmd/skills.go b/experimental/aitools/cmd/skills.go index 5afe94aebb..40cbf81ce3 100644 --- a/experimental/aitools/cmd/skills.go +++ b/experimental/aitools/cmd/skills.go @@ -9,10 +9,10 @@ import ( "os" "os/exec" "path/filepath" - "runtime" "strings" "time" + "github.com/databricks/cli/experimental/aitools/lib/agents" "github.com/databricks/cli/libs/cmdio" "github.com/fatih/color" "github.com/spf13/cobra" @@ -59,191 +59,6 @@ func addGitHubAuth(req *http.Request) { } } -// AgentConfig defines how to detect and install skills for an agent. -type AgentConfig struct { - Name string - DisplayName string - GlobalSkillDir func() (string, error) // returns global skills directory path - Detect func() bool // returns true if agent is installed -} - -// getHomeDir returns home directory, handling Windows USERPROFILE. -func getHomeDir() (string, error) { - if runtime.GOOS == "windows" { - if userProfile := os.Getenv("USERPROFILE"); userProfile != "" { - return userProfile, nil - } - } - return os.UserHomeDir() -} - -// supportedAgents defines all agents we can install skills to. -var supportedAgents = []AgentConfig{ - { - Name: "claude-code", - DisplayName: "Claude Code", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".claude", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".claude")) - return err == nil - }, - }, - { - Name: "cursor", - DisplayName: "Cursor", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".cursor", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".cursor")) - return err == nil - }, - }, - { - Name: "windsurf", - DisplayName: "Windsurf", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".codeium", "windsurf", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".codeium", "windsurf")) - return err == nil - }, - }, - { - Name: "cline", - DisplayName: "Cline", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".cline", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".cline")) - return err == nil - }, - }, - { - Name: "roo-code", - DisplayName: "Roo Code", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".roo-code", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - 
_, err = os.Stat(filepath.Join(home, ".roo-code")) - return err == nil - }, - }, - { - Name: "codex", - DisplayName: "Codex CLI", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".codex", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".codex")) - return err == nil - }, - }, - { - Name: "amp", - DisplayName: "Amp", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".amp", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".amp")) - return err == nil - }, - }, - { - Name: "opencode", - DisplayName: "OpenCode", - GlobalSkillDir: func() (string, error) { - home, err := getHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".config", "opencode", "skills"), nil - }, - Detect: func() bool { - home, err := getHomeDir() - if err != nil { - return false - } - _, err = os.Stat(filepath.Join(home, ".config", "opencode")) - return err == nil - }, - }, -} - -// detectInstalledAgents returns list of agents that are installed on the system. -func detectInstalledAgents() []AgentConfig { - var installed []AgentConfig - for _, agent := range supportedAgents { - if agent.Detect() { - installed = append(installed, agent) - } - } - return installed -} - type Manifest struct { Version string `json:"version"` UpdatedAt string `json:"updated_at"` @@ -288,7 +103,7 @@ Skills are installed globally to each agent's skills directory. When multiple agents are detected, skills are stored in a canonical location and symlinked to each agent to avoid duplication. 
-Supported agents: Claude Code, Cursor, Windsurf, Cline, Roo Code, Codex CLI, Amp, OpenCode`, +Supported agents: Claude Code, Cursor, Codex CLI, OpenCode, GitHub Copilot, Antigravity`, RunE: func(cmd *cobra.Command, args []string) error { if len(args) > 0 { return installSkill(cmd.Context(), args[0]) @@ -500,19 +315,19 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("skill %q not found", skillName) } - // detect installed agents - agents := detectInstalledAgents() - if len(agents) == 0 { + // detect installed agents using shared registry + detectedAgents := agents.DetectInstalled() + if len(detectedAgents) == 0 { cmdio.LogString(ctx, color.YellowString("No supported coding agents detected.")) cmdio.LogString(ctx, "") - cmdio.LogString(ctx, "Supported agents: Claude Code, Cursor, Windsurf, Cline, Roo Code, Codex CLI, Amp, OpenCode") + cmdio.LogString(ctx, "Supported agents: Claude Code, Cursor, Codex CLI, OpenCode, GitHub Copilot, Antigravity") cmdio.LogString(ctx, "Please install at least one coding agent first.") return nil } // print detected agents cmdio.LogString(ctx, "Detected coding agents:") - for _, agent := range agents { + for _, agent := range detectedAgents { cmdio.LogString(ctx, " - "+agent.DisplayName) } cmdio.LogString(ctx, "") @@ -523,13 +338,13 @@ func installSkill(ctx context.Context, skillName string) error { return fmt.Errorf("failed to list skill files: %w", err) } - homeDir, err := getHomeDir() + homeDir, err := os.UserHomeDir() if err != nil { return fmt.Errorf("failed to get home directory: %w", err) } // determine installation strategy - useSymlinks := len(agents) > 1 + useSymlinks := len(detectedAgents) > 1 var canonicalDir string if useSymlinks { @@ -541,8 +356,8 @@ func installSkill(ctx context.Context, skillName string) error { } // install/symlink to each agent - for _, agent := range agents { - agentSkillDir, err := agent.GlobalSkillDir() + for _, agent := range detectedAgents { + agentSkillDir, err := agent.SkillsDir() if err != nil { cmdio.LogString(ctx, color.YellowString("⊘ Skipped %s: %v", agent.DisplayName, err)) continue diff --git a/experimental/aitools/lib/agents/agents.go b/experimental/aitools/lib/agents/agents.go new file mode 100644 index 0000000000..be53983aab --- /dev/null +++ b/experimental/aitools/lib/agents/agents.go @@ -0,0 +1,123 @@ +package agents + +import ( + "os" + "path/filepath" + "runtime" +) + +// Agent defines a coding agent that can have skills installed and optionally MCP server. +type Agent struct { + Name string + DisplayName string + // ConfigDir returns the agent's config directory (e.g., ~/.claude). + // Used for detection and as base for skills directory. + ConfigDir func() (string, error) + // SkillsSubdir is the subdirectory within ConfigDir for skills (default: "skills"). + SkillsSubdir string + // InstallMCP installs the Databricks MCP server for this agent. + // Nil if agent doesn't support MCP or we haven't implemented it. + InstallMCP func() error +} + +// Detected returns true if the agent is installed on the system. +func (a *Agent) Detected() bool { + dir, err := a.ConfigDir() + if err != nil { + return false + } + _, err = os.Stat(dir) + return err == nil +} + +// SkillsDir returns the full path to the agent's skills directory. 
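+// When SkillsSubdir is empty, SkillsDir defaults to "skills" under ConfigDir.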
+func (a *Agent) SkillsDir() (string, error) { + configDir, err := a.ConfigDir() + if err != nil { + return "", err + } + subdir := a.SkillsSubdir + if subdir == "" { + subdir = "skills" + } + return filepath.Join(configDir, subdir), nil +} + +// getHomeDir returns home directory, handling Windows USERPROFILE. +func getHomeDir() (string, error) { + if runtime.GOOS == "windows" { + if userProfile := os.Getenv("USERPROFILE"); userProfile != "" { + return userProfile, nil + } + } + return os.UserHomeDir() +} + +// homeSubdir returns a function that computes ~/subpath. +func homeSubdir(subpath ...string) func() (string, error) { + return func() (string, error) { + home, err := getHomeDir() + if err != nil { + return "", err + } + parts := append([]string{home}, subpath...) + return filepath.Join(parts...), nil + } +} + +// Registry contains all supported agents. +var Registry = []Agent{ + { + Name: "claude-code", + DisplayName: "Claude Code", + ConfigDir: homeSubdir(".claude"), + InstallMCP: InstallClaude, + }, + { + Name: "cursor", + DisplayName: "Cursor", + ConfigDir: homeSubdir(".cursor"), + InstallMCP: InstallCursor, + }, + { + Name: "codex", + DisplayName: "Codex CLI", + ConfigDir: homeSubdir(".codex"), + }, + { + Name: "opencode", + DisplayName: "OpenCode", + ConfigDir: homeSubdir(".config", "opencode"), + }, + { + Name: "copilot", + DisplayName: "GitHub Copilot", + ConfigDir: homeSubdir(".copilot"), + }, + { + Name: "antigravity", + DisplayName: "Antigravity", + ConfigDir: homeSubdir(".gemini", "antigravity"), + }, +} + +// DetectInstalled returns all agents that are installed on the system. +func DetectInstalled() []*Agent { + var installed []*Agent + for i := range Registry { + if Registry[i].Detected() { + installed = append(installed, &Registry[i]) + } + } + return installed +} + +// GetByName returns an agent by name, or nil if not found. +func GetByName(name string) *Agent { + for i := range Registry { + if Registry[i].Name == name { + return &Registry[i] + } + } + return nil +} diff --git a/experimental/aitools/lib/agents/claude.go b/experimental/aitools/lib/agents/claude.go index d5207dd887..9beffaed83 100644 --- a/experimental/aitools/lib/agents/claude.go +++ b/experimental/aitools/lib/agents/claude.go @@ -7,16 +7,11 @@ import ( "os/exec" ) -// DetectClaude checks if Claude Code CLI is installed and available on PATH. -func DetectClaude() bool { - _, err := exec.LookPath("claude") - return err == nil -} - // InstallClaude installs the Databricks AI Tools MCP server in Claude Code. 
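+// It requires the "claude" CLI to be available on PATH.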
func InstallClaude() error { - if !DetectClaude() { - return errors.New("claude Code CLI is not installed or not on PATH\n\nPlease install Claude Code and ensure 'claude' is available on your system PATH.\nFor installation instructions, visit: https://docs.anthropic.com/en/docs/claude-code") + // Check if claude CLI is available + if _, err := exec.LookPath("claude"); err != nil { + return errors.New("'claude' CLI is not installed or not on PATH\n\nPlease install Claude Code and ensure 'claude' is available on your system PATH.\nFor installation instructions, visit: https://docs.anthropic.com/en/docs/claude-code") } databricksPath, err := os.Executable() diff --git a/experimental/aitools/lib/agents/cursor.go b/experimental/aitools/lib/agents/cursor.go index 7303953069..16effbe062 100644 --- a/experimental/aitools/lib/agents/cursor.go +++ b/experimental/aitools/lib/agents/cursor.go @@ -5,7 +5,6 @@ import ( "fmt" "os" "path/filepath" - "runtime" ) type cursorConfig struct { @@ -18,47 +17,20 @@ type mcpServer struct { Env map[string]string `json:"env,omitempty"` } -func getCursorConfigPath() (string, error) { - if runtime.GOOS == "windows" { - userProfile := os.Getenv("USERPROFILE") - if userProfile == "" { - return "", os.ErrNotExist - } - return filepath.Join(userProfile, ".cursor", "mcp.json"), nil - } - - home, err := os.UserHomeDir() - if err != nil { - return "", err - } - return filepath.Join(home, ".cursor", "mcp.json"), nil -} - -// DetectCursor checks if Cursor is installed by looking for its config directory. -func DetectCursor() bool { - configPath, err := getCursorConfigPath() - if err != nil { - return false - } - // Check if the .cursor directory exists (not just the mcp.json file) - cursorDir := filepath.Dir(configPath) - _, err = os.Stat(cursorDir) - return err == nil -} - // InstallCursor installs the Databricks AI Tools MCP server in Cursor. 
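+// The server entry is merged into mcp.json in the Cursor config directory.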
func InstallCursor() error { - configPath, err := getCursorConfigPath() + configDir, err := homeSubdir(".cursor")() if err != nil { return fmt.Errorf("failed to determine Cursor config path: %w", err) } - // Check if .cursor directory exists (not the file, we'll create that if needed) - cursorDir := filepath.Dir(configPath) - if _, err := os.Stat(cursorDir); err != nil { - return fmt.Errorf("cursor directory not found at: %s\n\nPlease install Cursor from: https://cursor.sh", cursorDir) + // Check if .cursor directory exists + if _, err := os.Stat(configDir); err != nil { + return fmt.Errorf(".cursor directory not found at: %s\n\nPlease install Cursor from: https://cursor.sh", configDir) } + configPath := filepath.Join(configDir, "mcp.json") + // Read existing config var config cursorConfig data, err := os.ReadFile(configPath) From e7aa35454129cc53fe95d7159dac0912ca11a8b2 Mon Sep 17 00:00:00 2001 From: Arseny Kravchenko Date: Fri, 23 Jan 2026 15:08:52 +0100 Subject: [PATCH 6/6] fix Antigravity skills path to global_skills --- experimental/aitools/lib/agents/agents.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/experimental/aitools/lib/agents/agents.go b/experimental/aitools/lib/agents/agents.go index be53983aab..7fcf8e8df5 100644 --- a/experimental/aitools/lib/agents/agents.go +++ b/experimental/aitools/lib/agents/agents.go @@ -95,9 +95,10 @@ var Registry = []Agent{ ConfigDir: homeSubdir(".copilot"), }, { - Name: "antigravity", - DisplayName: "Antigravity", - ConfigDir: homeSubdir(".gemini", "antigravity"), + Name: "antigravity", + DisplayName: "Antigravity", + ConfigDir: homeSubdir(".gemini", "antigravity"), + SkillsSubdir: "global_skills", }, }
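
A minimal usage sketch of the shared registry introduced above (illustrative
only: the main function and output format here are assumptions; the agents
API is as defined in agents.go):

	package main

	import (
		"fmt"

		"github.com/databricks/cli/experimental/aitools/lib/agents"
	)

	func main() {
		// DetectInstalled stats each registry entry's ConfigDir on disk.
		for _, a := range agents.DetectInstalled() {
			// SkillsDir joins ConfigDir with SkillsSubdir (default "skills"),
			// e.g. ~/.claude/skills or ~/.gemini/antigravity/global_skills.
			dir, err := a.SkillsDir()
			if err != nil {
				fmt.Printf("%s: %v\n", a.DisplayName, err)
				continue
			}
			fmt.Printf("%s: %s\n", a.DisplayName, dir)
		}
	}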