diff --git a/cli/azd/.vscode/cspell.yaml b/cli/azd/.vscode/cspell.yaml index 01588050330..caf26c677ac 100644 --- a/cli/azd/.vscode/cspell.yaml +++ b/cli/azd/.vscode/cspell.yaml @@ -45,6 +45,7 @@ words: - jsonschema - rustc - figspec + - finetune - bubbletea - lipgloss - gopxl diff --git a/cli/azd/extensions/azure.ai.finetune/CHANGELOG.md b/cli/azd/extensions/azure.ai.finetune/CHANGELOG.md new file mode 100644 index 00000000000..fe26d749ffa --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/CHANGELOG.md @@ -0,0 +1,3 @@ +# Release History + +## 0.0.1 - Initial Version diff --git a/cli/azd/extensions/azure.ai.finetune/README.md b/cli/azd/extensions/azure.ai.finetune/README.md new file mode 100644 index 00000000000..fc3b38c37eb --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/README.md @@ -0,0 +1,3 @@ +# `azd` Finetune Extension + +An AZD Finetune extension diff --git a/cli/azd/extensions/azure.ai.finetune/build.ps1 b/cli/azd/extensions/azure.ai.finetune/build.ps1 new file mode 100644 index 00000000000..5ceb60a8bbc --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/build.ps1 @@ -0,0 +1,78 @@ +# Ensure script fails on any error +$ErrorActionPreference = 'Stop' + +# Get the directory of the script +$EXTENSION_DIR = Split-Path -Parent $MyInvocation.MyCommand.Path + +# Change to the script directory +Set-Location -Path $EXTENSION_DIR + +# Create a safe version of EXTENSION_ID replacing dots with dashes +$EXTENSION_ID_SAFE = $env:EXTENSION_ID -replace '\.', '-' + +# Define output directory +$OUTPUT_DIR = if ($env:OUTPUT_DIR) { $env:OUTPUT_DIR } else { Join-Path $EXTENSION_DIR "bin" } + +# Create output directory if it doesn't exist +if (-not (Test-Path -Path $OUTPUT_DIR)) { + New-Item -ItemType Directory -Path $OUTPUT_DIR | Out-Null +} + +# Get Git commit hash and build date +$COMMIT = git rev-parse HEAD +if ($LASTEXITCODE -ne 0) { + Write-Host "Error: Failed to get git commit hash" + exit 1 +} +$BUILD_DATE = (Get-Date -Format "yyyy-MM-ddTHH:mm:ssZ") + +# List of OS and architecture combinations +if ($env:EXTENSION_PLATFORM) { + $PLATFORMS = @($env:EXTENSION_PLATFORM) +} +else { + $PLATFORMS = @( + "windows/amd64", + "windows/arm64", + "darwin/amd64", + "darwin/arm64", + "linux/amd64", + "linux/arm64" + ) +} + +$APP_PATH = "$env:EXTENSION_ID/internal/cmd" + +# Loop through platforms and build +foreach ($PLATFORM in $PLATFORMS) { + $OS, $ARCH = $PLATFORM -split '/' + + $OUTPUT_NAME = Join-Path $OUTPUT_DIR "$EXTENSION_ID_SAFE-$OS-$ARCH" + + if ($OS -eq "windows") { + $OUTPUT_NAME += ".exe" + } + + Write-Host "Building for $OS/$ARCH..." + + # Delete the output file if it already exists + if (Test-Path -Path $OUTPUT_NAME) { + Remove-Item -Path $OUTPUT_NAME -Force + } + + # Set environment variables for Go build + $env:GOOS = $OS + $env:GOARCH = $ARCH + + go build ` + -ldflags="-X '$APP_PATH.Version=$env:EXTENSION_VERSION' -X '$APP_PATH.Commit=$COMMIT' -X '$APP_PATH.BuildDate=$BUILD_DATE'" ` + -o $OUTPUT_NAME + + if ($LASTEXITCODE -ne 0) { + Write-Host "An error occurred while building for $OS/$ARCH" + exit 1 + } +} + +Write-Host "Build completed successfully!" +Write-Host "Binaries are located in the $OUTPUT_DIR directory." 
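Note: the `-ldflags "-X ..."` stamping used in build.ps1 (and likewise in build.sh and ci-build.ps1 below) assumes the extension's `internal/cmd` package declares matching settable string variables. A minimal sketch of what that package might contain — the variable names come from the `-X` targets in the scripts; the default values are placeholders, not taken from the PR:

```go
// Package cmd holds build-time metadata that the build scripts stamp in via
// -ldflags "-X '<module>/internal/cmd.Version=...'" (sketch only; defaults are assumed).
package cmd

var (
	Version   = "0.0.0-dev" // overridden at link time with EXTENSION_VERSION
	Commit    = "none"      // overridden at link time with the git commit hash
	BuildDate = "unknown"   // overridden at link time with the UTC build timestamp
)
```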
diff --git a/cli/azd/extensions/azure.ai.finetune/build.sh b/cli/azd/extensions/azure.ai.finetune/build.sh new file mode 100644 index 00000000000..f1a995ec5e9 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/build.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +# Get the directory of the script +EXTENSION_DIR="$(cd "$(dirname "$0")" && pwd)" + +# Change to the script directory +cd "$EXTENSION_DIR" || exit + +# Create a safe version of EXTENSION_ID replacing dots with dashes +EXTENSION_ID_SAFE="${EXTENSION_ID//./-}" + +# Define output directory +OUTPUT_DIR="${OUTPUT_DIR:-$EXTENSION_DIR/bin}" + +# Create output and target directories if they don't exist +mkdir -p "$OUTPUT_DIR" + +# Get Git commit hash and build date +COMMIT=$(git rev-parse HEAD) +BUILD_DATE=$(date -u +%Y-%m-%dT%H:%M:%SZ) + +# List of OS and architecture combinations +if [ -n "$EXTENSION_PLATFORM" ]; then + PLATFORMS=("$EXTENSION_PLATFORM") +else + PLATFORMS=( + "windows/amd64" + "windows/arm64" + "darwin/amd64" + "darwin/arm64" + "linux/amd64" + "linux/arm64" + ) +fi + +APP_PATH="$EXTENSION_ID/internal/cmd" + +# Loop through platforms and build +for PLATFORM in "${PLATFORMS[@]}"; do + OS=$(echo "$PLATFORM" | cut -d'/' -f1) + ARCH=$(echo "$PLATFORM" | cut -d'/' -f2) + + OUTPUT_NAME="$OUTPUT_DIR/$EXTENSION_ID_SAFE-$OS-$ARCH" + + if [ "$OS" = "windows" ]; then + OUTPUT_NAME+='.exe' + fi + + echo "Building for $OS/$ARCH..." + + # Delete the output file if it already exists + [ -f "$OUTPUT_NAME" ] && rm -f "$OUTPUT_NAME" + + # Set environment variables for Go build + GOOS=$OS GOARCH=$ARCH go build \ + -ldflags="-X '$APP_PATH.Version=$EXTENSION_VERSION' -X '$APP_PATH.Commit=$COMMIT' -X '$APP_PATH.BuildDate=$BUILD_DATE'" \ + -o "$OUTPUT_NAME" + + if [ $? -ne 0 ]; then + echo "An error occurred while building for $OS/$ARCH" + exit 1 + fi +done + +echo "Build completed successfully!" +echo "Binaries are located in the $OUTPUT_DIR directory." diff --git a/cli/azd/extensions/azure.ai.finetune/ci-build.ps1 b/cli/azd/extensions/azure.ai.finetune/ci-build.ps1 new file mode 100644 index 00000000000..fafe8a5b969 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/ci-build.ps1 @@ -0,0 +1,142 @@ +param( + [string] $Version = (Get-Content "$PSScriptRoot/version.txt"), + [string] $SourceVersion = (git rev-parse HEAD), + [switch] $CodeCoverageEnabled, + [switch] $BuildRecordMode, + [string] $MSYS2Shell, # path to msys2_shell.cmd + [string] $OutputFileName +) + +# Remove any previously built binaries +go clean + +if ($LASTEXITCODE) { + Write-Host "Error running go clean" + exit $LASTEXITCODE +} + +# Run `go help build` to obtain detailed information about `go build` flags. +$buildFlags = @( + # remove all file system paths from the resulting executable. + # Instead of absolute file system paths, the recorded file names + # will begin either a module path@version (when using modules), + # or a plain import path (when using the standard library, or GOPATH). + "-trimpath", + + # Use buildmode=pie (Position Independent Executable) for enhanced security across platforms + # against memory corruption exploits across all major platforms. + # + # On Windows, the -buildmode=pie flag enables Address Space Layout + # Randomization (ASLR) and automatically sets DYNAMICBASE and HIGH-ENTROPY-VA flags in the PE header. 
+ "-buildmode=pie" +) + +if ($CodeCoverageEnabled) { + $buildFlags += "-cover" +} + +# Build constraint tags +# cfi: Enable Control Flow Integrity (CFI), +# cfg: Enable Control Flow Guard (CFG), +# osusergo: Optimize for OS user accounts +$tagsFlag = "-tags=cfi,cfg,osusergo" + +# ld linker flags +# -s: Omit symbol table and debug information +# -w: Omit DWARF symbol table +# -X: Set variable at link time. Used to set the version in source. + +$ldFlag = "-ldflags=-s -w -X 'azure.ai.finetune/internal/cmd.Version=$Version' -X 'azure.ai.finetune/internal/cmd.Commit=$SourceVersion' -X 'azure.ai.finetune/internal/cmd.BuildDate=$(Get-Date -Format o)' " + +if ($IsWindows) { + $msg = "Building for Windows" + Write-Host $msg +} +elseif ($IsLinux) { + Write-Host "Building for linux" +} +elseif ($IsMacOS) { + Write-Host "Building for macOS" +} + +# Add output file flag based on specified output file name +$outputFlag = "-o=$OutputFileName" + +# collect flags +$buildFlags += @( + $tagsFlag, + $ldFlag, + $outputFlag +) + +function PrintFlags() { + param( + [string] $flags + ) + + # Attempt to format flags so that they are easily copy-pastable to be ran inside pwsh + $i = 0 + foreach ($buildFlag in $buildFlags) { + # If the flag has a value, wrap it in quotes. This is not required when invoking directly below, + # but when repasted into a shell for execution, the quotes can help escape special characters such as ','. + $argWithValue = $buildFlag.Split('=', 2) + if ($argWithValue.Length -eq 2 -and !$argWithValue[1].StartsWith("`"")) { + $buildFlag = "$($argWithValue[0])=`"$($argWithValue[1])`"" + } + + # Write each flag on a newline with '`' acting as the multiline separator + if ($i -eq $buildFlags.Length - 1) { + Write-Host " $buildFlag" + } + else { + Write-Host " $buildFlag ``" + } + $i++ + } +} + +$oldGOEXPERIMENT = $env:GOEXPERIMENT +# Enable the loopvar experiment, which makes the loop variaible for go loops like `range` behave as most folks would expect. +# the go team is exploring making this default in the future, and we'd like to opt into the behavior now. 
+$env:GOEXPERIMENT = "loopvar" + +try { + Write-Host "Running: go build ``" + PrintFlags -flags $buildFlags + go build @buildFlags + if ($LASTEXITCODE) { + Write-Host "Error running go build" + exit $LASTEXITCODE + } + + if ($BuildRecordMode) { + # Modify build tags to include record + $recordTagPatched = $false + for ($i = 0; $i -lt $buildFlags.Length; $i++) { + if ($buildFlags[$i].StartsWith("-tags=")) { + $buildFlags[$i] += ",record" + $recordTagPatched = $true + } + } + if (-not $recordTagPatched) { + $buildFlags += "-tags=record" + } + # Add output file flag for record mode + $recordOutput = "-o=$OutputFileName-record" + if ($IsWindows) { $recordOutput += ".exe" } + $buildFlags += $recordOutput + + Write-Host "Running: go build (record) ``" + PrintFlags -flags $buildFlags + go build @buildFlags + if ($LASTEXITCODE) { + Write-Host "Error running go build (record)" + exit $LASTEXITCODE + } + } + + Write-Host "go build succeeded" +} +finally { + $env:GOEXPERIMENT = $oldGOEXPERIMENT +} \ No newline at end of file diff --git a/cli/azd/extensions/azure.ai.finetune/cspell.yaml b/cli/azd/extensions/azure.ai.finetune/cspell.yaml new file mode 100644 index 00000000000..6f2423ef132 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/cspell.yaml @@ -0,0 +1,9 @@ +words: + - azureprovider + - finetune + - finetuning + - ftjob + - hyperparameters + - openaiprovider + - openaisdk + - wandb \ No newline at end of file diff --git a/cli/azd/extensions/azure.ai.finetune/design/IMPLEMENTATION_SUMMARY.md b/cli/azd/extensions/azure.ai.finetune/design/IMPLEMENTATION_SUMMARY.md new file mode 100644 index 00000000000..a0aea7735fa --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/design/IMPLEMENTATION_SUMMARY.md @@ -0,0 +1,284 @@ +# Architecture Implementation - Folder Structure & Interfaces + +## Created Folder Structure + +``` +azure.ai.finetune/ +├── pkg/ +│ └── models/ # Domain Models (Shared Foundation) +│ ├── finetune.go # FineTuningJob, JobStatus, CreateFineTuningRequest +│ ├── deployment.go # Deployment, DeploymentStatus, DeploymentRequest +│ ├── errors.go # ErrorDetail, Error codes +│ └── requests.go # All request/response DTOs +│ +├── internal/ +│ ├── services/ # Service Layer (Business Logic) +│ │ ├── interface.go # FineTuningService, DeploymentService interfaces +│ │ ├── state_store.go # StateStore, ErrorTransformer interfaces +│ │ ├── finetune_service.go # FineTuningService implementation (stub) +│ │ └── deployment_service.go # DeploymentService implementation (stub) +│ │ +│ └── providers/ # Provider Layer (SDK Adapters) +│ ├── interface.go # FineTuningProvider, ModelDeploymentProvider interfaces +│ ├── openai/ +│ │ └── provider.go # OpenAI provider implementation (stub) +│ └── azure/ +│ └── provider.go # Azure provider implementation (stub) +│ +├── design/ +│ └── architecture.md # Architecture documentation +└── [existing files unchanged] +``` + +## Files Created + +### 1. 
Domain Models (pkg/models/) + +#### finetune.go +- `JobStatus` enum: pending, queued, running, succeeded, failed, cancelled, paused +- `FineTuningJob` - main domain model for jobs +- `CreateFineTuningRequest` - request DTO +- `Hyperparameters` - hyperparameter configuration +- `ListFineTuningJobsRequest` - pagination request +- `FineTuningJobDetail` - detailed job info +- `JobEvent` - event information +- `JobCheckpoint` - checkpoint data + +#### deployment.go +- `DeploymentStatus` enum: pending, active, updating, failed, deleting +- `Deployment` - main domain model for deployments +- `DeploymentRequest` - request DTO +- `DeploymentConfig` - configuration for deployments +- `BaseModel` - base model information + +#### errors.go +- `ErrorDetail` - standardized error structure +- Error code constants: INVALID_REQUEST, NOT_FOUND, UNAUTHORIZED, RATE_LIMITED, etc. +- Error method implementation + +#### requests.go +- All request DTOs: PauseJobRequest, ResumeJobRequest, CancelJobRequest, etc. +- ListDeploymentsRequest, GetDeploymentRequest, UpdateDeploymentRequest, etc. + +--- + +### 2. Provider Layer (internal/providers/) + +#### interface.go +Defines two main interfaces: + +**FineTuningProvider Interface** +- `CreateFineTuningJob()` +- `GetFineTuningStatus()` +- `ListFineTuningJobs()` +- `GetFineTuningJobDetails()` +- `GetJobEvents()` +- `GetJobCheckpoints()` +- `PauseJob()` +- `ResumeJob()` +- `CancelJob()` +- `UploadFile()` +- `GetUploadedFile()` + +**ModelDeploymentProvider Interface** +- `DeployModel()` +- `GetDeploymentStatus()` +- `ListDeployments()` +- `UpdateDeployment()` +- `DeleteDeployment()` + +#### openai/provider.go (Stub Implementation) +- `OpenAIProvider` struct +- Implements both `FineTuningProvider` and `ModelDeploymentProvider` +- All methods have TODO comments (ready for implementation) +- Constructor: `NewOpenAIProvider(apiKey, endpoint)` + +#### azure/provider.go (Stub Implementation) +- `AzureProvider` struct +- Implements both `FineTuningProvider` and `ModelDeploymentProvider` +- All methods have TODO comments (ready for implementation) +- Constructor: `NewAzureProvider(endpoint, apiKey)` + +--- + +### 3. 
Service Layer (internal/services/) + +#### interface.go +Defines two service interfaces: + +**FineTuningService Interface** +- `CreateFineTuningJob()` - with business validation +- `GetFineTuningStatus()` +- `ListFineTuningJobs()` +- `GetFineTuningJobDetails()` +- `GetJobEvents()` - with filtering +- `GetJobCheckpoints()` - with pagination +- `PauseJob()` - with state validation +- `ResumeJob()` - with state validation +- `CancelJob()` - with proper validation +- `UploadTrainingFile()` - with validation +- `UploadValidationFile()` - with validation +- `PollJobUntilCompletion()` - async polling + +**DeploymentService Interface** +- `DeployModel()` - with validation +- `GetDeploymentStatus()` +- `ListDeployments()` +- `UpdateDeployment()` - with validation +- `DeleteDeployment()` - with validation +- `WaitForDeployment()` - timeout support + +#### state_store.go +Defines persistence interfaces: + +**StateStore Interface** +- Job persistence: SaveJob, GetJob, ListJobs, UpdateJobStatus, DeleteJob +- Deployment persistence: SaveDeployment, GetDeployment, ListDeployments, UpdateDeploymentStatus, DeleteDeployment + +**ErrorTransformer Interface** +- `TransformError()` - converts vendor errors to standardized ErrorDetail + +#### finetune_service.go (Stub Implementation) +- `fineTuningServiceImpl` struct +- Implements `FineTuningService` interface +- Constructor: `NewFineTuningService(provider, stateStore)` +- All methods have TODO comments (ready for implementation) +- Takes `FineTuningProvider` and `StateStore` as dependencies + +#### deployment_service.go (Stub Implementation) +- `deploymentServiceImpl` struct +- Implements `DeploymentService` interface +- Constructor: `NewDeploymentService(provider, stateStore)` +- All methods have TODO comments (ready for implementation) +- Takes `ModelDeploymentProvider` and `StateStore` as dependencies + +--- + +## Architecture Verification + +### Import Rules Enforced + +✅ **pkg/models/** - No imports from other layers +- Pure data structures only + +✅ **internal/providers/interface.go** - Only imports models +- Vendor-agnostic interface definitions + +✅ **internal/providers/openai/provider.go** - Can import: +- `pkg/models` (domain models) +- OpenAI SDK (when implemented) + +✅ **internal/providers/azure/provider.go** - Can import: +- `pkg/models` (domain models) +- Azure SDK (when implemented) + +✅ **internal/services/interface.go** - Only imports: +- `pkg/models` +- `context` + +✅ **internal/services/finetune_service.go** - Only imports: +- `pkg/models` +- `internal/providers` (interface, not concrete) +- `internal/services` (own package for StateStore) + +✅ **internal/services/deployment_service.go** - Only imports: +- `pkg/models` +- `internal/providers` (interface, not concrete) +- `internal/services` (own package for StateStore) + +--- + +## Next Steps + +### To Implement Provider Layer: + +1. **OpenAI Provider** (`internal/providers/openai/provider.go`) + - Add OpenAI SDK imports + - Implement domain ↔ SDK conversions + - Fill in method bodies + - Add error transformation logic + +2. **Azure Provider** (`internal/providers/azure/provider.go`) + - Add Azure SDK imports + - Implement domain ↔ SDK conversions + - Fill in method bodies + - Add error transformation logic + +### To Implement Service Layer: + +1. **FineTuningService** (`internal/services/finetune_service.go`) + - Implement validation logic + - Add state persistence calls + - Error transformation + - Fill in method bodies + +2. 
**DeploymentService** (`internal/services/deployment_service.go`) + - Implement validation logic + - Add state persistence calls + - Error transformation + - Fill in method bodies + +3. **StateStore Implementation** + - File-based storage (JSON files) + - Or in-memory with persistence + +### To Refactor CLI Layer: + +1. Update `internal/cmd/operations.go` + - Remove direct SDK calls + - Use service layer instead + - Inject services via DI + - Format output only + +2. Create command factory + - Initialize providers + - Initialize services + - Pass to command constructors + +--- + +## Key Benefits of This Structure + +✅ **No Existing Files Modified** +- All new files +- Extension to existing code without breaking changes + +✅ **Clear Separation of Concerns** +- Models: Pure data +- Providers: SDK integration +- Services: Business logic +- CLI: User interface + +✅ **Multi-Vendor Ready** +- Add new vendor: Just implement provider interface +- No CLI or service changes needed + +✅ **Testable** +- Mock provider at interface level +- Test services independently +- Integration tests for providers + +✅ **Future Proof** +- Easy to add Anthropic, Cohere, etc. +- Easy to swap implementations +- Easy to add new features + +--- + +## File Summary + +| File | Lines | Purpose | +|------|-------|---------| +| pkg/models/finetune.go | ~100 | Fine-tuning domain models | +| pkg/models/deployment.go | ~80 | Deployment domain models | +| pkg/models/errors.go | ~40 | Error handling models | +| pkg/models/requests.go | ~60 | Request DTOs | +| internal/providers/interface.go | ~70 | Provider interfaces | +| internal/providers/openai/provider.go | ~150 | OpenAI stub (TODO) | +| internal/providers/azure/provider.go | ~150 | Azure stub (TODO) | +| internal/services/interface.go | ~100 | Service interfaces | +| internal/services/state_store.go | ~60 | Persistence interfaces | +| internal/services/finetune_service.go | ~120 | Fine-tuning service stub | +| internal/services/deployment_service.go | ~90 | Deployment service stub | +| **Total** | **~920** | **Complete stub structure** | + diff --git a/cli/azd/extensions/azure.ai.finetune/design/architecture.md b/cli/azd/extensions/azure.ai.finetune/design/architecture.md new file mode 100644 index 00000000000..724bc83e39e --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/design/architecture.md @@ -0,0 +1,677 @@ +# Azure AI Fine-Tune Extension - Low Level Design + +## 1. Overview + +This document describes the proposed three-layer architecture for the Azure AI Fine-Tune CLI extension. The design emphasizes vendor abstraction, separation of concerns, and multi-vendor extensibility. + +### Key Objectives + +- **Phase 1**: Support OpenAI fine-tuning and Azure Cognitive Services model deployment +- **Future Phases**: Onboard additional vendors without refactoring CLI or service layer +- **Testability**: Enable unit testing of business logic independently from SDK implementations +- **Maintainability**: Clear boundaries between layers for easier debugging and feature development + +--- + +## 2. Architecture Overview + +### Complete Layered Architecture with Entities + +``` +┌──────────────────────────────────────────────────────────────────┐ +│ DOMAIN MODELS / ENTITIES │ +│ (pkg/models/ - Shared Foundation) │ +│ │ +│ ├─ FineTuningJob ← All layers read/write these │ +│ ├─ Deployment │ +│ ├─ BaseModel │ +│ ├─ StandardError │ +│ ├─ CreateFineTuningRequest │ +│ └─ DeploymentRequest │ +│ │ +│ (No SDK imports! 
Pure data structures) │ +└──────────────────────────────────────────────────────────────────┘ + ↑ ↑ ↑ + │ (imports) │ (imports) │ (imports) + │ │ │ +┌───┴──────────────────┐ ┌──────────────┴──────┐ ┌─────────┴───────────┐ +│ CLI Layer │ │ Service Layer │ │ Provider Layer │ +│ (cmd/) │ │ (services/) │ │ (providers/) │ +│ │ │ │ │ │ +│ Uses: │ │ Uses: │ │ Uses: │ +│ - FineTuningJob ✅ │ │ - FineTuningJob ✅ │ │ - FineTuningJob ✅ │ +│ - Deployment ✅ │ │ - Deployment ✅ │ │ - Deployment ✅ │ +│ - Request DTOs ✅ │ │ - Request DTOs ✅ │ │ - Request DTOs ✅ │ +│ │ │ - StandardError ✅ │ │ - StandardError ✅ │ +│ Does: │ │ │ │ │ +│ - Parse input │ │ Does: │ │ Does: │ +│ - Format output │ │ - Validate │ │ - IMPORT SDK ⚠️ │ +│ - Call Service ↓ │ │ - Orchestrate │ │ - Convert domain → │ +│ │ │ - Call Provider ↓ │ │ SDK models │ +│ │ │ - State management │ │ - Call SDK │ +│ │ │ - Error transform │ │ - Convert SDK → │ +│ │ │ │ │ domain models │ +└──────────────────────┘ └─────────────────────┘ └─────────────────────┘ + ↓ + ┌────────────────────────────┴─────────┐ + │ SDK Layer (External) │ + │ │ + │ - OpenAI SDK │ + │ - Azure Cognitive Services SDK │ + │ - Future Vendor SDKs │ + └───────────────────────────────────────┘ +``` + +--- + +## 3. Layer Responsibilities + +### 3.1 Domain Models Layer (pkg/models/) + +**Responsibility**: Define vendor-agnostic data structures used across all layers. + +**Characteristics**: +- Zero SDK imports +- Pure data structures (Go structs) +- Single source of truth for data contracts +- Includes request/response DTOs and error types + +**What it Contains**: +- `FineTuningJob` - represents a fine-tuning job +- `Deployment` - represents a model deployment +- `CreateFineTuningRequest` - request to create a job +- `Hyperparameters` - training hyperparameters +- `ErrorDetail` - standardized error response +- `JobStatus`, `DeploymentStatus` - enums + +**Who Uses It**: All layers (CLI, Service, Provider) + +**Example Structure**: +```go +package models + +type FineTuningJob struct { + ID string + Status JobStatus + BaseModel string + FineTunedModel string + CreatedAt time.Time + CompletedAt *time.Time + VendorJobID string // Vendor-specific ID + VendorMetadata map[string]interface{} // Vendor-specific details + ErrorDetails *ErrorDetail +} + +type JobStatus string +const ( + StatusPending JobStatus = "pending" + StatusTraining JobStatus = "training" + StatusSucceeded JobStatus = "succeeded" + StatusFailed JobStatus = "failed" +) +``` + +--- + +### 3.2 CLI Layer (cmd/) + +**Responsibility**: Handle command parsing, user input validation, output formatting, and orchestration of user interactions. + +**Characteristics**: +- Does NOT import vendor SDKs +- Does NOT contain business logic +- Calls only the Service layer +- Responsible for presentation (table formatting, JSON output, etc.) + +**What it Does**: +- Parse command-line arguments and flags +- Validate user input format and constraints +- Call service methods to perform business logic +- Format responses for terminal output (tables, JSON, etc.) 
+- Handle error presentation to users +- Support multiple output formats (human-readable, JSON) + +**What it Does NOT Do**: +- Call SDK methods directly +- Implement business logic (validation, state management) +- Transform between vendor models +- Manage long-running operations (polling is in Service layer) + +**Imports**: +```go +import ( + "azure.ai.finetune/pkg/models" + "azure.ai.finetune/internal/services" + "github.com/spf13/cobra" // CLI framework +) +``` + +**Example**: +```go +func newOperationSubmitCommand(svc services.FineTuningService) *cobra.Command { + return &cobra.Command{ + Use: "submit", + Short: "Submit fine tuning job", + RunE: func(cmd *cobra.Command, args []string) error { + // 1. Parse input + req := &models.CreateFineTuningRequest{ + BaseModel: parseBaseModel(args), + TrainingDataID: parseTrainingFile(args), + } + + // 2. Call service (business logic) + job, err := svc.CreateFineTuningJob(cmd.Context(), req) + if err != nil { + return err + } + + // 3. Format output + printFineTuningJobTable(job) + return nil + }, + } +} +``` + +--- + +### 3.3 Service Layer (internal/services/) + +**Responsibility**: Implement business logic, orchestration, state management, and error standardization. + +**Characteristics**: +- Does NOT import vendor SDKs +- Imports Provider interface (abstraction, not concrete implementations) +- Central location for business rules +- Handles cross-vendor concerns +- Manages job lifecycle and state persistence + +**What it Does**: +- Validate business constraints (e.g., model limits, file sizes) +- Orchestrate multi-step operations +- Call provider methods to perform vendor-specific operations +- Transform vendor-specific errors to standardized `ErrorDetail` +- Manage job state persistence (local storage) +- Implement polling logic for long-running operations +- Handle retries and resilience patterns +- Manage job lifecycle state transitions + +**What it Does NOT Do**: +- Import SDK packages +- Format output for CLI +- Parse command-line arguments +- Call SDK methods directly + +**Key Interfaces**: +```go +type FineTuningProvider interface { + CreateFineTuningJob(ctx context.Context, req *CreateFineTuningRequest) (*FineTuningJob, error) + GetFineTuningStatus(ctx context.Context, jobID string) (*FineTuningJob, error) + ListFineTuningJobs(ctx context.Context) ([]*FineTuningJob, error) +} + +type StateStore interface { + SaveJob(job *FineTuningJob) error + GetJob(id string) (*FineTuningJob, error) + ListJobs() ([]*FineTuningJob, error) + UpdateJobStatus(id string, status JobStatus) error +} +``` + +**Imports**: +```go +import ( + "azure.ai.finetune/pkg/models" + "azure.ai.finetune/internal/providers" + "context" + "fmt" +) +``` + +**Example**: +```go +type FineTuningService struct { + provider providers.FineTuningProvider + stateStore StateStore +} + +func (s *FineTuningService) CreateFineTuningJob( + ctx context.Context, + req *models.CreateFineTuningRequest, +) (*models.FineTuningJob, error) { + // Business logic: validation + if err := s.validateRequest(req); err != nil { + return nil, fmt.Errorf("invalid request: %w", err) + } + + // Call abstracted provider (could be OpenAI, Azure, etc.) + job, err := s.provider.CreateFineTuningJob(ctx, req) + if err != nil { + // Transform vendor error to standard error + return nil, s.transformError(err) + } + + // State management: persist job + s.stateStore.SaveJob(job) + + return job, nil +} +``` + +--- + +### 3.4 Provider Layer (internal/providers/) + +**Responsibility**: Adapter pattern implementation. 
Bridge between domain models and vendor SDKs. + +**Characteristics**: +- **ONLY layer that imports vendor SDKs** +- Implements vendor-agnostic provider interface +- Converts between domain models and SDK models +- Handles vendor-specific error semantics +- No business logic (pure technical adaptation) + +**What it Does**: +- Import and instantiate vendor SDKs +- Convert domain models → SDK-specific request formats +- Call SDK methods +- Convert SDK response models → domain models +- Handle SDK-specific error codes and map to standard errors +- Manage SDK client lifecycle (initialization, auth) + +**What it Does NOT Do**: +- Implement business logic +- Manage state or persistence +- Format output for CLI +- Make decisions about retry logic or state transitions + +**Provider Interface** (in `internal/providers/interface.go` - No SDK imports!): +```go +package providers + +import ( + "context" + "azure.ai.finetune/pkg/models" +) + +type FineTuningProvider interface { + CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) + GetFineTuningStatus(ctx context.Context, jobID string) (*models.FineTuningJob, error) + ListFineTuningJobs(ctx context.Context) ([]*models.FineTuningJob, error) +} + +type ModelDeploymentProvider interface { + DeployModel(ctx context.Context, req *models.DeploymentRequest) (*models.Deployment, error) + GetDeploymentStatus(ctx context.Context, deploymentID string) (*models.Deployment, error) + DeleteDeployment(ctx context.Context, deploymentID string) error +} +``` + +**OpenAI Provider Example** (imports OpenAI SDK): +```go +package openai + +import ( + "context" + openaisdk "github.com/openai/openai-go" // ⚠️ SDK import! + "azure.ai.finetune/pkg/models" +) + +type OpenAIProvider struct { + client *openaisdk.Client +} + +func (p *OpenAIProvider) CreateFineTuningJob( + ctx context.Context, + req *models.CreateFineTuningRequest, +) (*models.FineTuningJob, error) { + // 1. Convert domain → SDK format + sdkReq := &openaisdk.FineTuningJobCreateParams{ + Model: openaisdk.F(req.BaseModel), + TrainingFile: openaisdk.F(req.TrainingDataID), + } + + // 2. Call SDK + sdkJob, err := p.client.FineTuning.Jobs.Create(ctx, sdkReq) + if err != nil { + return nil, err + } + + // 3. Convert SDK response → domain format + return p.sdkJobToDomain(sdkJob), nil +} + +// Helper: SDK model → domain model +func (p *OpenAIProvider) sdkJobToDomain(sdkJob *openaisdk.FineTuningJob) *models.FineTuningJob { + return &models.FineTuningJob{ + ID: sdkJob.ID, + Status: p.mapStatus(sdkJob.Status), + BaseModel: sdkJob.Model, + FineTunedModel: sdkJob.FineTunedModel, + VendorJobID: sdkJob.ID, + VendorMetadata: p.extractMetadata(sdkJob), + } +} +``` + +**Azure Provider Example** (imports Azure SDK): +```go +package azure + +import ( + "context" + cognitiveservices "github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices" // Different SDK! + "azure.ai.finetune/pkg/models" +) + +type AzureProvider struct { + client *cognitiveservices.Client +} + +func (p *AzureProvider) CreateFineTuningJob( + ctx context.Context, + req *models.CreateFineTuningRequest, +) (*models.FineTuningJob, error) { + // 1. Convert domain → Azure SDK format + sdkReq := p.domainRequestToAzureSDK(req) + + // 2. Call Azure SDK (different from OpenAI!) + sdkJob, err := p.client.CreateFineTuningJob(ctx, sdkReq) + if err != nil { + return nil, err + } + + // 3. Convert Azure SDK response → SAME domain model as OpenAI! + return p.azureJobToDomain(sdkJob), nil +} +``` + +--- + +## 4. 
Import Dependencies + +### Valid Imports by Layer + +``` +pkg/models/ + ↑ ↑ ↑ + │ imports │ imports │ imports + │ (only) │ (only) │ (only) + │ │ │ +cmd/ services/ providers/ +├─ pkg/models ├─ pkg/models ├─ pkg/models +├─ services/ ├─ providers/ ├─ vendor SDKs ✅ +├─ pkg/config │ interface only └─ Azure SDK +└─ github.com/ │ OpenAI SDK + spf13/cobra └─ github.com/ etc. + context +``` + +### Strict Rules + +| Layer | CAN Import | CANNOT Import | +|-------|---|---| +| **cmd/** | `pkg/models`, `services/`, `pkg/config`, `github.com/spf13/cobra` | Any SDK (openai, azure), `providers/` concrete impl | +| **services/** | `pkg/models`, `providers/` (interface only), `context` | Any SDK, cmd, concrete provider implementations | +| **providers/** | `pkg/models`, vendor SDKs ✅ | cmd, services, other providers | +| **pkg/models/** | Nothing | Anything | + +--- + +## 5. Directory Structure + +``` +azure.ai.finetune/ +├── internal/ +│ ├── cmd/ # CLI Layer +│ │ ├── root.go # Root command +│ │ ├── operations.go # Finetune operations (submit, list, etc.) +│ │ ├── deployment.go # Deployment operations +│ │ └── output.go # Output formatting (tables, JSON) +│ │ +│ ├── services/ # Service Layer +│ │ ├── finetune_service.go # FineTuningService implementation +│ │ ├── deployment_service.go # DeploymentService implementation +│ │ ├── state_store.go # State persistence interface +│ │ └── error_transform.go # Error transformation logic +│ │ +│ ├── providers/ # Provider Layer +│ │ ├── interface.go # FineTuningProvider, ModelDeploymentProvider interfaces +│ │ │ # (NO SDK imports here!) +│ │ ├── openai/ +│ │ │ ├── provider.go # OpenAI implementation (SDK import!) +│ │ │ └── converters.go # Domain ↔ OpenAI SDK conversion +│ │ └── azure/ +│ │ ├── provider.go # Azure implementation (SDK import!) +│ │ └── converters.go # Domain ↔ Azure SDK conversion +│ │ +│ ├── project/ # Project utilities +│ ├── tools/ # Misc utilities +│ └── fine_tuning_yaml/ # YAML parsing +│ +├── pkg/ +│ └── models/ # Domain Models (Shared) +│ ├── finetune.go # FineTuningJob, JobStatus, etc. +│ ├── deployment.go # Deployment, DeploymentStatus, etc. +│ ├── requests.go # Request DTOs (Create, Update, etc.) +│ ├── errors.go # ErrorDetail, StandardError types +│ └── base_model.go # BaseModel, ModelInfo, etc. +│ +├── design/ +│ ├── architecture.md # This file +│ └── sequence_diagrams.md # Interaction flows (future) +│ +├── main.go +├── go.mod +└── README.md +``` + +--- + +## 6. Data Flow Examples + +### 6.1 Create Fine-Tuning Job Flow + +``` +User Command: + azd finetune jobs submit -f config.yaml + + ↓ + +CLI Layer (cmd/operations.go): + 1. Parse arguments + 2. Read config.yaml → CreateFineTuningRequest {BaseModel, TrainingDataID} + 3. Call service.CreateFineTuningJob(ctx, req) + + ↓ + +Service Layer (services/finetune_service.go): + 1. Validate request (model exists, data size valid, etc.) + 2. Get provider from config (OpenAI vs Azure) + 3. Call provider.CreateFineTuningJob(ctx, req) + 4. Transform any errors + 5. Persist job to state store + 6. Return FineTuningJob + + ↓ + +Provider Layer (providers/openai/provider.go): + 1. Convert CreateFineTuningRequest → OpenAI SDK format + 2. Call: client.FineTuning.Jobs.Create(ctx, sdkReq) + 3. Convert OpenAI response → FineTuningJob domain model + 4. 
Return FineTuningJob + + ↓ + +Service Layer: + Gets FineTuningJob back + Saves to state store + Returns to CLI + + ↓ + +CLI Layer: + Receives FineTuningJob + Formats for output (table or JSON) + Prints: "Job created: ftjob-abc123" + Exit +``` + +### 6.2 Switch Provider (OpenAI → Azure) + +``` +Code Change Needed: + ✅ internal/providers/azure/provider.go (new file) + ✅ internal/config/config.yaml (provider: azure) + ❌ internal/services/finetune_service.go (NO changes!) + ❌ cmd/operations.go (NO changes!) + +Why? + Service layer uses FineTuningProvider interface (abstracted) + CLI doesn't know about providers at all + Only provider layer imports SDK +``` + +### 6.3 Error Flow + +``` +User submits invalid data: + azd finetune jobs submit -f config.yaml + + ↓ + +CLI Layer: + Creates CreateFineTuningRequest from YAML + + ↓ + +Service Layer: + Validates: model not supported + Returns: &ErrorDetail{ + Code: "INVALID_MODEL", + Message: "Model 'gpt-5' not supported", + Retryable: false, + } + + ↓ + +CLI Layer: + Receives ErrorDetail + Prints user-friendly message + Exit with error code +``` + +--- + +## 7. Benefits of This Architecture + +### 7.1 Vendor Abstraction +- **Add new vendor**: Create `internal/providers/{vendor}/provider.go` +- **CLI changes**: None +- **Service changes**: None +- **Dependencies**: Only provider layer implementation + +### 7.2 Testability +- **Test business logic**: Mock provider at interface level +- **Test CLI**: Mock service +- **Test provider**: Use SDK directly (integration tests) + +### 7.3 Separation of Concerns +- **CLI**: What to show and how +- **Service**: What to do and how to do it (business rules) +- **Provider**: How to talk to vendor SDKs + +### 7.4 Maintainability +- **Vendor SDK updates**: Changes only in provider layer +- **Business logic changes**: Changes in service layer +- **Output format changes**: Changes in CLI layer + +### 7.5 Future Flexibility +- **Support multiple vendors simultaneously**: Multiple provider implementations +- **Provider selection at runtime**: Config-driven +- **A/B testing different implementations**: Easy switching + +--- + +## 8. Design Patterns Used + +### 8.1 Strategy Pattern +**Where**: Provider interface +``` +FineTuningProvider interface (strategy) +├── OpenAIProvider (concrete strategy) +├── AzureProvider (concrete strategy) +└── AnthropicProvider (future strategy) + +Service uses any strategy without knowing which +``` + +### 8.2 Adapter Pattern +**Where**: Provider implementations +- Convert domain models ↔ SDK models +- Standardize error responses + +### 8.3 Dependency Injection +**Where**: Service receives provider via constructor +```go +type FineTuningService struct { + provider providers.FineTuningProvider // Injected +} +``` + +### 8.4 Repository Pattern +**Where**: State persistence +```go +type StateStore interface { + SaveJob(job *FineTuningJob) error + GetJob(id string) (*FineTuningJob, error) +} +``` + +--- + +## 9. 
Phase 1 Implementation Checklist + +- [ ] Create `pkg/models/` with all domain models +- [ ] Create `internal/services/finetune_service.go` with interfaces +- [ ] Create `internal/services/deployment_service.go` with interfaces +- [ ] Create `internal/providers/interface.go` with provider interfaces +- [ ] Create `internal/providers/openai/provider.go` (OpenAI SDK) +- [ ] Create `internal/providers/azure/provider.go` (Azure SDK) +- [ ] Refactor `cmd/operations.go` to use service layer +- [ ] Create state store implementation (file or in-memory) +- [ ] Create unit tests for service layer +- [ ] Create integration tests for providers + +--- + +## 10. Future Considerations + +### 10.1 Phase 2: Additional Vendors +- Add `internal/providers/anthropic/provider.go` +- Add `internal/providers/cohere/provider.go` +- Service and CLI remain unchanged + +### 10.2 Async Job Tracking +- Service layer implements polling logic +- CLI supports `azd finetune jobs status ` +- Long-running operations tracked across sessions + +### 10.3 Webhook Support +- Service layer could support push notifications +- Provider layer handles webhook registration with vendor + +### 10.4 Cost Tracking +- Service layer accumulates cost metadata from providers +- CLI displays cost information + +--- + +## Questions for Team Discussion + +1. **State Persistence**: File-based or database-backed state store? +2. **Configuration**: YAML in project root or environment variables? +3. **Async Polling**: Should it run in background or user-initiated? +4. **Error Handling**: Retry logic - exponential backoff or fixed intervals? +5. **Testing**: Unit test requirements for service and provider layers? + diff --git a/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-dpo.yaml b/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-dpo.yaml new file mode 100644 index 00000000000..045abcf2fc5 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-dpo.yaml @@ -0,0 +1,24 @@ +# Example: Direct Preference Optimization (DPO) Configuration +# Use this for preference-based fine-tuning with preferred vs non-preferred outputs + +model: gpt-4o-mini +training_file: "local:./dpo_training_data.jsonl" + +# Optional: Validation data for monitoring +validation_file: "local:./dpo_validation_data.jsonl" + +suffix: "dpo-optimized" + +# DPO method configuration +method: + type: dpo + dpo: + hyperparameters: + epochs: 2 + batch_size: 16 + learning_rate_multiplier: 0.5 + beta: 0.1 # Temperature parameter for DPO (can be float or "auto") + +metadata: + project: "preference-tuning" + model-type: "dpo" diff --git a/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-reinforcement.yaml b/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-reinforcement.yaml new file mode 100644 index 00000000000..db9df5b261f --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-reinforcement.yaml @@ -0,0 +1,35 @@ +# Example: Reinforcement Learning Fine-Tuning Configuration +# Use for reinforcement learning with reward model or grader-based evaluation + +model: gpt-4o-mini +training_file: "local:./rl_training_data.jsonl" + +# Optional: Validation data +validation_file: "local:./rl_validation_data.jsonl" + +suffix: "rl-trained" +seed: 42 + +# Reinforcement learning method configuration +method: + type: reinforcement + reinforcement: + hyperparameters: + epochs: 3 + batch_size: 8 + learning_rate_multiplier: 1.0 + beta: 0.5 # Weighting for RL reward signal + compute_multiplier: 1.0 # Training computation budget 
multiplier + reasoning_effort: high # Can be: low, medium, high + + # Grader configuration for reward evaluation + grader: + type: string_check # Grader type for string-based criteria + grader_config: + criteria: "answer contains correct chemical formula" + expected_pattern: "formula_pattern" + +metadata: + project: "reinforcement-learning" + training-type: "reward-based" + grader-version: "v1" diff --git a/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-supervised.yaml b/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-supervised.yaml new file mode 100644 index 00000000000..b009e51a815 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/examples/fine-tuning-supervised.yaml @@ -0,0 +1,34 @@ +# Example: Supervised Fine-Tuning Configuration +# Use this for standard supervised learning tasks + +model: gpt-4o-mini +training_file: "local:./training_data.jsonl" +validation_file: "local:./validation_data.jsonl" + +# Optional: Custom suffix for fine-tuned model name +suffix: "my-custom-model" + +# Optional: Seed for reproducibility +seed: 42 + +# Fine-tuning method configuration +method: + type: supervised + supervised: + hyperparameters: + epochs: 3 # Number of training epochs + batch_size: 8 # Batch size (or "auto") + learning_rate_multiplier: 1.0 # Learning rate multiplier (or "auto") + +# Optional: Custom metadata +metadata: + project: "customer-support" + team: "ml-engineering" + version: "v1.0" + +# Optional: Integration with Weights & Biases for monitoring +integrations: + - type: wandb + config: + project: "fine-tuning-experiments" + name: "supervised-training" diff --git a/cli/azd/extensions/azure.ai.finetune/extension.yaml b/cli/azd/extensions/azure.ai.finetune/extension.yaml new file mode 100644 index 00000000000..7a5fd27263b --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/extension.yaml @@ -0,0 +1,16 @@ +id: azure.ai.finetune +namespace: ai.finetuning +displayName: Foundry Fine Tuning (Preview) +description: Extension for Foundry Fine Tuning. (Preview) +usage: azd ai finetuning [options] +version: 0.0.8-preview +language: go +capabilities: + - custom-commands +examples: + - name: init + description: Initialize a new AI fine-tuning project. + usage: azd ai finetuning init + - name: deploy + description: Deploy AI fine-tuning job to Azure. 
+ usage: azd ai finetuning deploy diff --git a/cli/azd/extensions/azure.ai.finetune/go.mod b/cli/azd/extensions/azure.ai.finetune/go.mod new file mode 100644 index 00000000000..245759a75d1 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/go.mod @@ -0,0 +1,90 @@ +module azure.ai.finetune + +go 1.25 + +require ( + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 + github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/cognitiveservices/armcognitiveservices v1.8.0 + github.com/azure/azure-dev/cli/azd v0.0.0-20260108010518-45071d9a61bc + github.com/braydonk/yaml v0.9.0 + github.com/fatih/color v1.18.0 + github.com/openai/openai-go/v3 v3.2.0 + github.com/spf13/cobra v1.10.1 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/AlecAivazis/survey/v2 v2.3.7 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect + github.com/adam-lavrik/go-imath v0.0.0-20210910152346-265a42a96f0b // indirect + github.com/alecthomas/chroma/v2 v2.20.0 // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/blang/semver/v4 v4.0.0 // indirect + github.com/buger/goterm v1.0.4 // indirect + github.com/charmbracelet/colorprofile v0.3.2 // indirect + github.com/charmbracelet/glamour v0.10.0 // indirect + github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect + github.com/charmbracelet/x/ansi v0.10.2 // indirect + github.com/charmbracelet/x/cellbuf v0.0.13 // indirect + github.com/charmbracelet/x/exp/slice v0.0.0-20251008171431-5d3777519489 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/clipperhouse/uax29/v2 v2.2.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/dlclark/regexp2 v1.11.5 // indirect + github.com/drone/envsubst v1.0.3 // indirect + github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/golang-jwt/jwt/v5 v5.3.0 // indirect + github.com/golobby/container/v3 v3.3.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/css v1.0.1 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/joho/godotenv v1.5.1 // indirect + github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect + github.com/kylelemons/godebug v1.1.0 // indirect + github.com/lucasb-eyer/go-colorful v1.3.0 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.19 // indirect + github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect + github.com/microcosm-cc/bluemonday v1.0.27 // indirect + github.com/microsoft/ApplicationInsights-Go v0.4.4 // indirect + github.com/microsoft/go-deviceid v1.0.0 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.16.0 // indirect + github.com/nathan-fiscaletti/consolesize-go v0.0.0-20220204101620-317176b6684d // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/sethvargo/go-retry v0.3.0 // 
indirect + github.com/spf13/pflag v1.0.10 // indirect + github.com/stretchr/testify v1.11.1 // indirect + github.com/theckman/yacspin v0.13.12 // indirect + github.com/tidwall/gjson v1.18.0 // indirect + github.com/tidwall/match v1.2.0 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/tidwall/sjson v1.2.5 // indirect + github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + github.com/yuin/goldmark v1.7.13 // indirect + github.com/yuin/goldmark-emoji v1.0.6 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/otel v1.38.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.opentelemetry.io/otel/sdk v1.38.0 // indirect + go.opentelemetry.io/otel/trace v1.38.0 // indirect + go.uber.org/atomic v1.11.0 // indirect + golang.org/x/crypto v0.45.0 // indirect + golang.org/x/net v0.47.0 // indirect + golang.org/x/sys v0.38.0 // indirect + golang.org/x/term v0.37.0 // indirect + golang.org/x/text v0.31.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251007200510-49b9836ed3ff // indirect + google.golang.org/grpc v1.76.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/cli/azd/extensions/azure.ai.finetune/go.sum b/cli/azd/extensions/azure.ai.finetune/go.sum new file mode 100644 index 00000000000..76f5dddebdf --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/go.sum @@ -0,0 +1,283 @@ +code.cloudfoundry.org/clock v0.0.0-20180518195852-02e53af36e6c/go.mod h1:QD9Lzhd/ux6eNQVUDVRJX/RKTigpewimNYBi7ivZKY8= +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= +github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 h1:JXg2dwJUmPB9JmtVmdEB16APJ7jurfbY5jnfXpJoRMc= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0/go.mod h1:YD5h/ldMsG0XiIw7PdyNhLxaM317eFh5yNLccNfGdyw= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 h1:Hk5QBxZQC1jb2Fwj6mpzme37xbCDdNTxU7O9eb5+LB4= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1/go.mod h1:IYus9qsFobWIc2YVwe/WPjcnyCkPKtnHAqUYeebc8z0= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 h1:9iefClla7iYpfYWdzPCRDozdmndjTm8DXdpCzPajMgA= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2/go.mod h1:XtLgD3ZD34DAaVIIAyG3objl5DynM3CQ/vMcbBNJZGI= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/cognitiveservices/armcognitiveservices v1.8.0 h1:ZMGAqCZov8+7iFUPWKVcTaLgNXUeTlz20sIuWkQWNfg= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/cognitiveservices/armcognitiveservices v1.8.0/go.mod h1:BElPQ/GZtrdQ2i5uDZw3OKLE1we75W0AEWyeBR1TWQA= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE= +github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 h1:XRzhVemXdgvJqCH0sFfrBUTnUJSBrBf7++ypk+twtRs= +github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0/go.mod 
h1:HKpQxkWaGLJ+D/5H8QRpyQXA1eKjxkFlOMwck5+33Jk= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s= +github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w= +github.com/adam-lavrik/go-imath v0.0.0-20210910152346-265a42a96f0b h1:g9SuFmxM/WucQFKTMSP+irxyf5m0RiUJreBDhGI6jSA= +github.com/adam-lavrik/go-imath v0.0.0-20210910152346-265a42a96f0b/go.mod h1:XjvqMUpGd3Xn9Jtzk/4GEBCSoBX0eB2RyriXgne0IdM= +github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= +github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= +github.com/alecthomas/chroma/v2 v2.20.0 h1:sfIHpxPyR07/Oylvmcai3X/exDlE8+FA820NTz+9sGw= +github.com/alecthomas/chroma/v2 v2.20.0/go.mod h1:e7tViK0xh/Nf4BYHl00ycY6rV7b8iXBksI9E359yNmA= +github.com/alecthomas/repr v0.5.1 h1:E3G4t2QbHTSNpPKBgMTln5KLkZHLOcU7r37J4pXBuIg= +github.com/alecthomas/repr v0.5.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8= +github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/azure/azure-dev/cli/azd v0.0.0-20260108010518-45071d9a61bc h1:Wei/1LT53Ojk96VwCL2SEXU9HxVtqU+a2DdAqYsLQlQ= +github.com/azure/azure-dev/cli/azd v0.0.0-20260108010518-45071d9a61bc/go.mod h1:j+bdvNwQPdYtSfFe/xbfWqYr8Guw9hiP1JOVpIBERj0= +github.com/benbjohnson/clock v1.3.5 h1:VvXlSJBzZpA/zum6Sj74hxwYI2DIxRWuNIoXAzHZz5o= +github.com/benbjohnson/clock v1.3.5/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= +github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= +github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= +github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M= +github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= +github.com/braydonk/yaml v0.9.0 h1:ewGMrVmEVpsm3VwXQDR388sLg5+aQ8Yihp6/hc4m+h4= +github.com/braydonk/yaml v0.9.0/go.mod h1:hcm3h581tudlirk8XEUPDBAimBPbmnL0Y45hCRl47N4= +github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= +github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/charmbracelet/colorprofile v0.3.2 h1:9J27WdztfJQVAQKX2WOlSSRB+5gaKqqITmrvb1uTIiI= +github.com/charmbracelet/colorprofile v0.3.2/go.mod h1:mTD5XzNeWHj8oqHb+S1bssQb7vIHbepiebQ2kPKVKbI= +github.com/charmbracelet/glamour v0.10.0 h1:MtZvfwsYCx8jEPFJm3rIBFIMZUfUJ765oX8V6kXldcY= +github.com/charmbracelet/glamour v0.10.0/go.mod h1:f+uf+I/ChNmqo087elLnVdCiVgjSKWuXa/l6NU2ndYk= +github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE= +github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA= 
+github.com/charmbracelet/x/ansi v0.10.2 h1:ith2ArZS0CJG30cIUfID1LXN7ZFXRCww6RUvAPA+Pzw= +github.com/charmbracelet/x/ansi v0.10.2/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8= +github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k= +github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= +github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a h1:G99klV19u0QnhiizODirwVksQB91TJKV/UaTnACcG30= +github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= +github.com/charmbracelet/x/exp/slice v0.0.0-20251008171431-5d3777519489 h1:a5q2sWiet6kgqucSGjYN1jhT2cn4bMKUwprtm2IGRto= +github.com/charmbracelet/x/exp/slice v0.0.0-20251008171431-5d3777519489/go.mod h1:vqEfX6xzqW1pKKZUUiFOKg0OQ7bCh54Q2vR/tserrRA= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY= +github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI= +github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= +github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/drone/envsubst v1.0.3 h1:PCIBwNDYjs50AsLZPYdfhSATKaRg/FJmDc2D6+C2x8g= +github.com/drone/envsubst v1.0.3/go.mod h1:N2jZmlMufstn1KEqvbHjw40h1KyTmnVzHcSc9bFiJ2g= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golobby/container/v3 v3.3.2 h1:7u+RgNnsdVlhGoS8gY4EXAG601vpMMzLZlYqSp77Quw= +github.com/golobby/container/v3 v3.3.2/go.mod h1:RDdKpnKpV1Of11PFBe7Dxc2C1k2KaLE4FD47FflAmj0= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= +github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= +github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= +github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog= +github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/keybase/go-keychain v0.0.1 h1:way+bWYa6lDppZoZcgMbYsvC7GxljxrskdNInRtuthU= +github.com/keybase/go-keychain v0.0.1/go.mod h1:PdEILRW3i9D8JcdM+FmY6RwkHGnhHxXwkPPMeUgOK1k= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= +github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.20 
h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= +github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= +github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= +github.com/microsoft/ApplicationInsights-Go v0.4.4 h1:G4+H9WNs6ygSCe6sUyxRc2U81TI5Es90b2t/MwX5KqY= +github.com/microsoft/ApplicationInsights-Go v0.4.4/go.mod h1:fKRUseBqkw6bDiXTs3ESTiU/4YTIHsQS4W3fP2ieF4U= +github.com/microsoft/go-deviceid v1.0.0 h1:i5AQ654Xk9kfvwJeKQm3w2+eT1+ImBDVEpAR0AjpP40= +github.com/microsoft/go-deviceid v1.0.0/go.mod h1:KY13FeVdHkzD8gy+6T8+kVmD/7RMpTaWW75K+T4uZWg= +github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= +github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= +github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= +github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/nathan-fiscaletti/consolesize-go v0.0.0-20220204101620-317176b6684d h1:NqRhLdNVlozULwM1B3VaHhcXYSgrOAv8V5BE65om+1Q= +github.com/nathan-fiscaletti/consolesize-go v0.0.0-20220204101620-317176b6684d/go.mod h1:cxIIfNMTwff8f/ZvRouvWYF6wOoO7nj99neWSx2q/Es= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/openai/openai-go/v3 v3.2.0 h1:2AbqFUCsoW2pm/2pUtPRuwK89dnoGHaQokzWsfoQO/U= +github.com/openai/openai-go/v3 v3.2.0/go.mod h1:UOpNxkqC9OdNXNUfpNByKOtB4jAL0EssQXq5p8gO0Xs= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= 
+github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE= +github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas= +github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= +github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= +github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tedsuo/ifrit v0.0.0-20180802180643-bea94bb476cc/go.mod h1:eyZnKCc955uh98WQvzOm0dgAeLnf2O0Rz0LPoC5ze+0= +github.com/theckman/yacspin v0.13.12 h1:CdZ57+n0U6JMuh2xqjnjRq5Haj6v1ner2djtLQRzJr4= +github.com/theckman/yacspin v0.13.12/go.mod h1:Rd2+oG2LmQi5f3zC3yeZAOl245z8QOvrH4OPOJNZxLg= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/match v1.2.0 h1:0pt8FlkOwjN2fPt4bIl4BoNxb98gGHN2ObFEDkrfZnM= +github.com/tidwall/match v1.2.0/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= +github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA= +github.com/yuin/goldmark v1.7.13/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= +github.com/yuin/goldmark-emoji v1.0.6 h1:QWfF2FYaXwL74tfGOW5izeiZepUDroDJfWubQI9HTHs= +github.com/yuin/goldmark-emoji v1.0.6/go.mod 
h1:ukxJDKFpdFb5x0a5HqbdlcKtebh086iJpI31LTKmWuA= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= +go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= +go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= +golang.org/x/exp v0.0.0-20250911091902-df9299821621 h1:2id6c1/gto0kaHYyrixvknJ8tUK/Qs5IsmBtrc+FtgU= +golang.org/x/exp v0.0.0-20250911091902-df9299821621/go.mod h1:TwQYMMnGpvZyc+JpB/UAuTNIsVJifOlSkrZkhcvpVUk= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210331175145-43e1dd70ce54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251007200510-49b9836ed3ff h1:A90eA31Wq6HOMIQlLfzFwzqGKBTuaVztYu/g8sn+8Zc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251007200510-49b9836ed3ff/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= +google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= +google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 
h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/init.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/init.go new file mode 100644 index 00000000000..de7262ea552 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/init.go @@ -0,0 +1,920 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package cmd + +import ( + "context" + "encoding/json" + "fmt" + "net/url" + "os" + "path/filepath" + "strings" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" + "github.com/Azure/azure-sdk-for-go/sdk/azidentity" + "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/cognitiveservices/armcognitiveservices" + "github.com/azure/azure-dev/cli/azd/pkg/azdext" + "github.com/azure/azure-dev/cli/azd/pkg/exec" + "github.com/azure/azure-dev/cli/azd/pkg/input" + "github.com/azure/azure-dev/cli/azd/pkg/tools/github" + "github.com/fatih/color" + "github.com/spf13/cobra" + + "azure.ai.finetune/internal/services" +) + +type initFlags struct { + rootFlagsDefinition + template string + projectResourceId string + jobId string + src string + env string +} + +// AiProjectResourceConfig represents the configuration for an AI project resource +type AiProjectResourceConfig struct { + Models []map[string]interface{} `json:"models,omitempty"` +} + +type InitAction struct { + azdClient *azdext.AzdClient + //azureClient *azure.AzureClient + azureContext *azdext.AzureContext + //composedResources []*azdext.ComposedResource + console input.Console + credential azcore.TokenCredential + projectConfig *azdext.ProjectConfig + environment *azdext.Environment + flags *initFlags +} + +// GitHubUrlInfo holds parsed information from a GitHub URL +type GitHubUrlInfo struct { + RepoSlug string + Branch string + FilePath string + Hostname string +} + +const AiFineTuningHost = "azure.ai.finetune" + +func newInitCommand(rootFlags rootFlagsDefinition) *cobra.Command { + flags := &initFlags{ + rootFlagsDefinition: rootFlags, + } + + cmd := &cobra.Command{ + Use: "init [-t ] [-p ]", + Short: fmt.Sprintf("Initialize a new AI Fine-tuning project. 
%s", color.YellowString("(Preview)")), + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := azdext.WithAccessToken(cmd.Context()) + + azdClient, err := azdext.NewAzdClient() + if err != nil { + return fmt.Errorf("failed to create azd client: %w", err) + } + defer azdClient.Close() + + azureContext, projectConfig, environment, err := ensureAzureContext(ctx, flags, azdClient) + if err != nil { + return fmt.Errorf("failed to ground into a project context: %w", err) + } + + credential, err := azidentity.NewAzureDeveloperCLICredential(&azidentity.AzureDeveloperCLICredentialOptions{ + TenantID: azureContext.Scope.TenantId, + AdditionallyAllowedTenants: []string{"*"}, + }) + if err != nil { + return fmt.Errorf("failed to create azure credential: %w", err) + } + + console := input.NewConsole( + false, // noPrompt + true, // isTerminal + input.Writers{Output: os.Stdout}, + input.ConsoleHandles{ + Stderr: os.Stderr, + Stdin: os.Stdin, + Stdout: os.Stdout, + }, + nil, // formatter + nil, // externalPromptCfg + ) + + action := &InitAction{ + azdClient: azdClient, + azureContext: azureContext, + console: console, + credential: credential, + projectConfig: projectConfig, + environment: environment, + flags: flags, + } + + if err := action.Run(ctx); err != nil { + return fmt.Errorf("failed to run start action: %w", err) + } + + return nil + }, + } + + cmd.Flags().StringVarP(&flags.template, "template", "t", "", + "URL or path to a fine-tune job template") + + cmd.Flags().StringVarP(&flags.projectResourceId, "project", "p", "", + "Existing Microsoft Foundry Project Id to initialize your azd environment with") + + cmd.Flags().StringVarP(&flags.src, "source", "s", "", + "Local path for project output") + + cmd.Flags().StringVarP(&flags.jobId, "from-job", "j", "", + "Clone configuration from an existing job ID") + + cmd.Flags().StringVarP(&flags.env, "environment", "e", "", "The name of the azd environment to use.") + + return cmd +} + +type FoundryProject struct { + TenantId string `json:"tenantId"` + SubscriptionId string `json:"subscriptionId"` + Location string `json:"location"` + ResourceGroupName string `json:"resourceGroupName"` + AiAccountName string `json:"aiAccountName"` + AiProjectName string `json:"aiProjectName"` +} + +func extractProjectDetails(projectResourceId string) (*FoundryProject, error) { + resourceId, err := arm.ParseResourceID(projectResourceId) + if err != nil { + return nil, fmt.Errorf("failed to parse project resource ID: %w", err) + } + + // Validate that this is a Cognitive Services project resource + if resourceId.ResourceType.Namespace != "Microsoft.CognitiveServices" || len(resourceId.ResourceType.Types) != 2 || + resourceId.ResourceType.Types[0] != "accounts" || resourceId.ResourceType.Types[1] != "projects" { + return nil, fmt.Errorf("the given resource ID is not a Microsoft Foundry project. 
Expected format: /subscriptions/[SUBSCRIPTION_ID]/resourceGroups/[RESOURCE_GROUP]/providers/Microsoft.CognitiveServices/accounts/[ACCOUNT_NAME]/projects/[PROJECT_NAME]") + } + + // Extract the components + return &FoundryProject{ + SubscriptionId: resourceId.SubscriptionID, + ResourceGroupName: resourceId.ResourceGroupName, + AiAccountName: resourceId.Parent.Name, + AiProjectName: resourceId.Name, + }, nil +} + +func getExistingEnvironment(ctx context.Context, name *string, azdClient *azdext.AzdClient) (*azdext.Environment, error) { + var env *azdext.Environment + if name == nil || *name == "" { + envResponse, err := azdClient.Environment().GetCurrent(ctx, &azdext.EmptyRequest{}) + if err != nil { + return nil, fmt.Errorf("failed to get current environment: %w", err) + } + env = envResponse.Environment + } else { + envResponse, err := azdClient.Environment().Get(ctx, &azdext.GetEnvironmentRequest{ + Name: *name, + }) + if err != nil { + return nil, fmt.Errorf("failed to get environment '%s': %w", *name, err) + } + env = envResponse.Environment + } + + return env, nil +} + +func ensureEnvironment(ctx context.Context, flags *initFlags, azdClient *azdext.AzdClient) (*azdext.Environment, error) { + var foundryProject *FoundryProject + + // Parse the Microsoft Foundry project resource ID if provided & Fetch Tenant Id and Location using parsed information + if flags.projectResourceId != "" { + var err error + foundryProject, err = extractProjectDetails(flags.projectResourceId) + if err != nil { + return nil, fmt.Errorf("failed to parse Microsoft Foundry project ID: %w", err) + } + + // Get the tenant ID + tenantResponse, err := azdClient.Account().LookupTenant(ctx, &azdext.LookupTenantRequest{ + SubscriptionId: foundryProject.SubscriptionId, + }) + if err != nil { + return nil, fmt.Errorf("failed to get tenant ID: %w", err) + } + foundryProject.TenantId = tenantResponse.TenantId + credential, err := azidentity.NewAzureDeveloperCLICredential(&azidentity.AzureDeveloperCLICredentialOptions{ + TenantID: foundryProject.TenantId, + AdditionallyAllowedTenants: []string{"*"}, + }) + if err != nil { + return nil, fmt.Errorf("failed to create Azure credential: %w", err) + } + + // Create Cognitive Services Projects client + projectsClient, err := armcognitiveservices.NewProjectsClient(foundryProject.SubscriptionId, credential, nil) + if err != nil { + return nil, fmt.Errorf("failed to create Cognitive Services Projects client: %w", err) + } + + // Get the Microsoft Foundry project + projectResp, err := projectsClient.Get(ctx, foundryProject.ResourceGroupName, foundryProject.AiAccountName, foundryProject.AiProjectName, nil) + if err != nil { + return nil, fmt.Errorf("failed to get Microsoft Foundry project: %w", err) + } + + foundryProject.Location = *projectResp.Location + } + + // Get specified or current environment if it exists + existingEnv, err := getExistingEnvironment(ctx, &flags.env, azdClient) + if err != nil { + return nil, fmt.Errorf("failed to get existing environment: %w", err) + } + if existingEnv == nil { + // Dispatch `azd env new` to create a new environment with interactive flow + fmt.Println("Lets create a new default azd environment for your project.") + + envArgs := []string{"env", "new"} + if flags.env != "" { + envArgs = append(envArgs, flags.env) + } + + if flags.projectResourceId != "" { + envArgs = append(envArgs, "--subscription", foundryProject.SubscriptionId) + envArgs = append(envArgs, "--location", foundryProject.Location) + } + + // Dispatch a workflow to create a new 
environment + // Handles both interactive and no-prompt flows + workflow := &azdext.Workflow{ + Name: "env new", + Steps: []*azdext.WorkflowStep{ + {Command: &azdext.WorkflowCommand{Args: envArgs}}, + }, + } + + _, err := azdClient.Workflow().Run(ctx, &azdext.RunWorkflowRequest{ + Workflow: workflow, + }) + if err != nil { + return nil, fmt.Errorf("failed to create new azd environment: %w", err) + } + + // Re-fetch the environment after creation + existingEnv, err = getExistingEnvironment(ctx, &flags.env, azdClient) + if err != nil { + return nil, fmt.Errorf("failed to get environment after creation: %w", err) + } + } + + // Set TenantId, SubscriptionId, ResourceGroupName, AiAccountName, and Location in the environment + if flags.projectResourceId != "" { + + _, err := azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{ + EnvName: existingEnv.Name, + Key: "AZURE_TENANT_ID", + Value: foundryProject.TenantId, + }) + if err != nil { + return nil, fmt.Errorf("failed to set AZURE_TENANT_ID in azd environment: %w", err) + } + + _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{ + EnvName: existingEnv.Name, + Key: "AZURE_SUBSCRIPTION_ID", + Value: foundryProject.SubscriptionId, + }) + if err != nil { + return nil, fmt.Errorf("failed to set AZURE_SUBSCRIPTION_ID in azd environment: %w", err) + } + + _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{ + EnvName: existingEnv.Name, + Key: "AZURE_RESOURCE_GROUP_NAME", + Value: foundryProject.ResourceGroupName, + }) + if err != nil { + return nil, fmt.Errorf("failed to set AZURE_RESOURCE_GROUP_NAME in azd environment: %w", err) + } + + _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{ + EnvName: existingEnv.Name, + Key: "AZURE_ACCOUNT_NAME", + Value: foundryProject.AiAccountName, + }) + if err != nil { + return nil, fmt.Errorf("failed to set AZURE_ACCOUNT_NAME in azd environment: %w", err) + } + + _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{ + EnvName: existingEnv.Name, + Key: "AZURE_PROJECT_NAME", + Value: foundryProject.AiProjectName, + }) + if err != nil { + return nil, fmt.Errorf("failed to set AZURE_PROJECT_NAME in azd environment: %w", err) + } + + _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{ + EnvName: existingEnv.Name, + Key: "AZURE_LOCATION", + Value: foundryProject.Location, + }) + if err != nil { + return nil, fmt.Errorf("failed to set AZURE_LOCATION in environment: %w", err) + } + + } + + return existingEnv, nil +} +func ensureProject(ctx context.Context, flags *initFlags, azdClient *azdext.AzdClient) (*azdext.ProjectConfig, error) { + projectResponse, err := azdClient.Project().Get(ctx, &azdext.EmptyRequest{}) + if err != nil { + fmt.Println("Lets get your project initialized.") + + initArgs := []string{"init"} + if flags.env != "" { + initArgs = append(initArgs, "-e", flags.env) + } + + // We don't have a project yet + // Dispatch a workflow to init the project + workflow := &azdext.Workflow{ + Name: "init", + Steps: []*azdext.WorkflowStep{ + {Command: &azdext.WorkflowCommand{Args: initArgs}}, + }, + } + + _, err := azdClient.Workflow().Run(ctx, &azdext.RunWorkflowRequest{ + Workflow: workflow, + }) + + if err != nil { + return nil, fmt.Errorf("failed to initialize project: %w", err) + } + + projectResponse, err = azdClient.Project().Get(ctx, &azdext.EmptyRequest{}) + if err != nil { + return nil, fmt.Errorf("failed to get project: %w", err) + } + + fmt.Println() + } + + if projectResponse.Project == nil { + return nil, 
fmt.Errorf("project not found")
+    }
+
+    return projectResponse.Project, nil
+}
+
+func ensureAzureContext(
+    ctx context.Context,
+    flags *initFlags,
+    azdClient *azdext.AzdClient,
+) (*azdext.AzureContext, *azdext.ProjectConfig, *azdext.Environment, error) {
+    project, err := ensureProject(ctx, flags, azdClient)
+    if err != nil {
+        return nil, nil, nil, fmt.Errorf("failed to ensure project: %w", err)
+    }
+
+    env, err := ensureEnvironment(ctx, flags, azdClient)
+    if err != nil {
+        return nil, nil, nil, fmt.Errorf("failed to ensure environment: %w", err)
+    }
+
+    envValues, err := azdClient.Environment().GetValues(ctx, &azdext.GetEnvironmentRequest{
+        Name: env.Name,
+    })
+    if err != nil {
+        return nil, nil, nil, fmt.Errorf("failed to get environment values: %w", err)
+    }
+
+    envValueMap := make(map[string]string)
+    for _, value := range envValues.KeyValues {
+        envValueMap[value.Key] = value.Value
+    }
+
+    azureContext := &azdext.AzureContext{
+        Scope: &azdext.AzureScope{
+            TenantId:       envValueMap["AZURE_TENANT_ID"],
+            SubscriptionId: envValueMap["AZURE_SUBSCRIPTION_ID"],
+            Location:       envValueMap["AZURE_LOCATION"],
+            ResourceGroup:  envValueMap["AZURE_RESOURCE_GROUP_NAME"],
+        },
+        Resources: []string{},
+    }
+
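+    // Note: the prompts below backfill any scope values that are still missing and persist the
+    // answers to the azd environment, so later commands (e.g. the `jobs` subcommands, which
+    // require these values via validateEnvironment in validation.go) can resolve the Foundry
+    // project without prompting again. Keys written during init include AZURE_TENANT_ID,
+    // AZURE_SUBSCRIPTION_ID, AZURE_RESOURCE_GROUP_NAME, AZURE_ACCOUNT_NAME, AZURE_PROJECT_NAME,
+    // and AZURE_LOCATION.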
+    if azureContext.Scope.SubscriptionId == "" {
+        fmt.Println()
+        fmt.Println("It looks like we first need to connect to your Azure subscription.")
+
+        subscriptionResponse, err := azdClient.Prompt().PromptSubscription(ctx, &azdext.PromptSubscriptionRequest{})
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to prompt for subscription: %w", err)
+        }
+
+        azureContext.Scope.SubscriptionId = subscriptionResponse.Subscription.Id
+        azureContext.Scope.TenantId = subscriptionResponse.Subscription.TenantId
+
+        // Set the tenant ID in the environment
+        _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{
+            EnvName: env.Name,
+            Key:     "AZURE_TENANT_ID",
+            Value:   azureContext.Scope.TenantId,
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to set AZURE_TENANT_ID in environment: %w", err)
+        }
+
+        // Set the subscription ID in the environment
+        _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{
+            EnvName: env.Name,
+            Key:     "AZURE_SUBSCRIPTION_ID",
+            Value:   azureContext.Scope.SubscriptionId,
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to set AZURE_SUBSCRIPTION_ID in environment: %w", err)
+        }
+    }
+    if azureContext.Scope.ResourceGroup == "" {
+        fmt.Println()
+
+        resourceGroupResponse, err := azdClient.Prompt().
+            PromptResourceGroup(ctx, &azdext.PromptResourceGroupRequest{
+                AzureContext: azureContext,
+            })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to prompt for resource group: %w", err)
+        }
+
+        azureContext.Scope.ResourceGroup = resourceGroupResponse.ResourceGroup.Name
+
+        // Set the resource group name in the environment
+        _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{
+            EnvName: env.Name,
+            Key:     "AZURE_RESOURCE_GROUP_NAME",
+            Value:   azureContext.Scope.ResourceGroup,
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to set AZURE_RESOURCE_GROUP_NAME in environment: %w", err)
+        }
+    }
+
+    if envValueMap["AZURE_ACCOUNT_NAME"] == "" {
+
+        foundryProjectResponse, err := azdClient.Prompt().PromptResourceGroupResource(ctx, &azdext.PromptResourceGroupResourceRequest{
+            AzureContext: azureContext,
+            Options: &azdext.PromptResourceOptions{
+                ResourceType:            "Microsoft.CognitiveServices/accounts/projects",
+                ResourceTypeDisplayName: "AI Foundry project",
+                SelectOptions: &azdext.PromptResourceSelectOptions{
+                    AllowNewResource: to.Ptr(false),
+                    Message:          "Select a Foundry project",
+                    LoadingMessage:   "Fetching Foundry projects...",
+                },
+            },
+        })
+
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to get Microsoft Foundry project: %w", err)
+        }
+
+        fpDetails, err := extractProjectDetails(foundryProjectResponse.Resource.Id)
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to parse Microsoft Foundry project ID: %w", err)
+        }
+
+        credential, err := azidentity.NewAzureDeveloperCLICredential(&azidentity.AzureDeveloperCLICredentialOptions{
+            TenantID:                   azureContext.Scope.TenantId,
+            AdditionallyAllowedTenants: []string{"*"},
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to create Azure credential: %w", err)
+        }
+
+        // Create Cognitive Services Projects client
+        projectsClient, err := armcognitiveservices.NewProjectsClient(azureContext.Scope.SubscriptionId, credential, nil)
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to create Cognitive Services Projects client: %w", err)
+        }
+
+        // Get the Microsoft Foundry project
+        projectResp, err := projectsClient.Get(ctx, azureContext.Scope.ResourceGroup, fpDetails.AiAccountName, fpDetails.AiProjectName, nil)
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to get Microsoft Foundry project: %w", err)
+        }
+
+        // Set the account name in the environment
+        _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{
+            EnvName: env.Name,
+            Key:     "AZURE_ACCOUNT_NAME",
+            Value:   fpDetails.AiAccountName,
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to set AZURE_ACCOUNT_NAME in environment: %w", err)
+        }
+
+        _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{
+            EnvName: env.Name,
+            Key:     "AZURE_PROJECT_NAME",
+            Value:   fpDetails.AiProjectName,
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to set AZURE_PROJECT_NAME in environment: %w", err)
+        }
+
+        location := *projectResp.Location
+
+        // Set the location in the environment
+        _, err = azdClient.Environment().SetValue(ctx, &azdext.SetEnvRequest{
+            EnvName: env.Name,
+            Key:     "AZURE_LOCATION",
+            Value:   location,
+        })
+        if err != nil {
+            return nil, nil, nil, fmt.Errorf("failed to set AZURE_LOCATION in environment: %w", err)
+        }
+    }
+
+    return azureContext, project, env, nil
+}
+
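+// Run executes the init flow. When neither --template nor --from-job is provided it prompts for a
+// base model and fine-tuning method and writes a starter config/job.yaml; accepting the defaults
+// produces, for example:
+//
+//    name: ft-cli-job
+//    description: Template to demonstrate fine-tuning via CLI
+//    model: gpt-4o-mini
+//    method:
+//      type: supervised
+//
+// With --template it copies a local directory or downloads the template's parent directory from
+// GitHub, and with --from-job it clones the configuration of an existing fine-tuning job.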
+func (a *InitAction) Run(ctx context.Context) error {
+    // Validate that either template or from-job is provided, but not both
+    if a.flags.template != "" && a.flags.jobId != "" {
+        return fmt.Errorf("cannot specify both --template and --from-job flags")
+    }
+
+    color.Green("Creating fine-tuning Job definition...")
+
+    var cwd string
+    var err error
+
+    // Use src flag if provided, otherwise use current working directory
+    if a.flags.src != "" {
+        cwd = a.flags.src
+    } else {
+        cwd, err = os.Getwd()
+        if err != nil {
+            return fmt.Errorf("failed to get current working directory: %w", err)
+        }
+    }
+
+    if a.flags.template == "" && a.flags.jobId == "" {
+        defaultBaseModel := "gpt-4o-mini"
+        defaultMethod := "supervised"
+        baseModelForFineTuningInput, err := a.azdClient.Prompt().Prompt(ctx, &azdext.PromptRequest{
+            Options: &azdext.PromptOptions{
+                Message:        "Enter base model name for fine tuning (defaults to model name)",
+                IgnoreHintKeys: true,
+                DefaultValue:   defaultBaseModel,
+            },
+        })
+        if err != nil {
+            return err
+        }
+        ftMethodInput, err := a.azdClient.Prompt().Prompt(ctx, &azdext.PromptRequest{
+            Options: &azdext.PromptOptions{
+                Message:        "Enter fine-tuning method (defaults to supervised)",
+                IgnoreHintKeys: true,
+                DefaultValue:   defaultMethod,
+            },
+        })
+        if err != nil {
+            return err
+        }
+        fmt.Printf("Base model: %s, Fine-tuning method: %s\n", baseModelForFineTuningInput.Value, ftMethodInput.Value)
+
+        // Create YAML file with the fine-tuning job template
+        yamlContent := fmt.Sprintf(`name: ft-cli-job
+description: Template to demonstrate fine-tuning via CLI
+model: %s
+method:
+  type: %s
+`, baseModelForFineTuningInput.Value, ftMethodInput.Value)
+
+        // Determine the output directory (use src flag or current directory)
+        outputDir := a.flags.src
+        if outputDir == "" {
+            var err error
+            outputDir, err = os.Getwd()
+            if err != nil {
+                return fmt.Errorf("failed to get current working directory: %w", err)
+            }
+        }
+
+        yamlFilePath := filepath.Join(outputDir, "config", "job.yaml")
+        if err := os.MkdirAll(filepath.Dir(yamlFilePath), 0755); err != nil {
+            return fmt.Errorf("failed to create config directory: %w", err)
+        }
+
+        if err := os.WriteFile(yamlFilePath, []byte(yamlContent), 0644); err != nil {
+            return fmt.Errorf("failed to write job.yaml file: %w", err)
+        }
+
+        fmt.Printf("Created fine-tuning job template at: %s\n", yamlFilePath)
+
+        // Set the template flag to the newly created YAML file
+        a.flags.template = yamlFilePath
+    } else if a.flags.template != "" {
+
+        if a.isGitHubUrl(a.flags.template) {
+            // Download the entire parent directory of the template from GitHub
+            fmt.Println("Downloading full directory for fine-tuning configuration from GitHub...")
+            var ghCli *github.Cli
+            var console input.Console
+            var urlInfo *GitHubUrlInfo
+            // Create a simple console and command runner for GitHub CLI
+            commandRunner := exec.NewCommandRunner(&exec.RunnerOptions{
+                Stdout: os.Stdout,
+                Stderr: os.Stderr,
+            })
+
+            console = input.NewConsole(
+                false, // noPrompt
+                true,  // isTerminal
+                input.Writers{Output: os.Stdout},
+                input.ConsoleHandles{
+                    Stderr: os.Stderr,
+                    Stdin:  os.Stdin,
+                    Stdout: os.Stdout,
+                },
+                nil, // formatter
+                nil, // externalPromptCfg
+            )
+            ghCli, err = github.NewGitHubCli(ctx, console, commandRunner)
+            if err != nil {
+                return fmt.Errorf("creating GitHub CLI: %w", err)
+            }
+
+            // Create a new AZD client
+            azdClient, err := azdext.NewAzdClient()
+            if err != nil {
+                return fmt.Errorf("failed to create azd client: %w", err)
+            }
+            defer azdClient.Close()
+
+            // Call the ParseGitHubUrl RPC method
+            parseResponse, err := azdClient.Project().ParseGitHubUrl(ctx, &azdext.ParseGitHubUrlRequest{
+                Url: a.flags.template,
+            })
+            if err != nil {
+                return fmt.Errorf("parsing GitHub URL via azd extension: %w", err)
+            }
+
+            // Map the response to GitHubUrlInfo
+            urlInfo = &GitHubUrlInfo{
+                RepoSlug: parseResponse.RepoSlug,
+                Branch:   parseResponse.Branch,
+                FilePath: parseResponse.FilePath,
+                Hostname: parseResponse.Hostname,
+            }
+
+            if urlInfo.Branch != "" {
+                fmt.Printf("Downloaded manifest from branch: %s\n", urlInfo.Branch)
+            }
+            err = downloadParentDirectory(ctx, urlInfo, cwd, ghCli, console)
+            if err != nil {
+                return fmt.Errorf("downloading
parent directory: %w", err) + } + } else { + if err := copyDirectory(a.flags.template, cwd); err != nil { + return fmt.Errorf("failed to copy directory: %w", err) + } + } + } else if a.flags.jobId != "" { + fmt.Printf("Cloning fine-tuning job configuration from job ID: %s\n", a.flags.jobId) + fineTuneSvc, err := services.NewFineTuningService(ctx, a.azdClient, nil) + if err != nil { + return fmt.Errorf("failed to create fine-tuning service: %w", err) + } + + // Fetch job details + fmt.Printf("Fetching fine-tuning job %s...\n", a.flags.jobId) + job, err := fineTuneSvc.GetFineTuningJobDetails(ctx, a.flags.jobId) + if err != nil { + return fmt.Errorf("failed to fetch fine-tuning job details: %w", err) + } + + // Create YAML file with job configuration + yamlContent := fmt.Sprintf(`name: %s +description: Cloned configuration from job %s +model: %s +seed: %d +method: + type: %s +`, a.flags.jobId, a.flags.jobId, job.Model, job.Seed, job.Method) + + // Add hyperparameters nested under method type if present + if job.Hyperparameters != nil { + yamlContent += fmt.Sprintf(` %s: + hyperparameters: + epochs: %d + batch_size: %d + learning_rate_multiplier: %f +`, job.Method, job.Hyperparameters.NEpochs, job.Hyperparameters.BatchSize, job.Hyperparameters.LearningRateMultiplier) + + // Add beta parameter only for DPO method + if strings.ToLower(job.Method) == "dpo" { + yamlContent += fmt.Sprintf(" beta: %v\n", job.Hyperparameters.Beta) + } + + // Add reinforcement-specific hyperparameters + if strings.ToLower(job.Method) == "reinforcement" { + yamlContent += fmt.Sprintf(" compute_multiplier: %f\n", job.Hyperparameters.ComputeMultiplier) + yamlContent += fmt.Sprintf(" eval_interval: %d\n", job.Hyperparameters.EvalInterval) + yamlContent += fmt.Sprintf(" eval_samples: %d\n", job.Hyperparameters.EvalSamples) + yamlContent += fmt.Sprintf(" reasoning_effort: %s\n", job.Hyperparameters.ReasoningEffort) + } + } + + // Add training and validation files + yamlContent += fmt.Sprintf("training_file: %s\n", job.TrainingFile) + if job.ValidationFile != "" { + yamlContent += fmt.Sprintf("validation_file: %s\n", job.ValidationFile) + } + + // Determine the output directory (use src flag or current directory) + outputDir := a.flags.src + if outputDir == "" { + var err error + outputDir, err = os.Getwd() + if err != nil { + return fmt.Errorf("failed to get current working directory: %w", err) + } + } + + yamlFilePath := filepath.Join(outputDir, "config", "job.yaml") + if err := os.MkdirAll(filepath.Dir(yamlFilePath), 0755); err != nil { + return fmt.Errorf("failed to create config directory: %w", err) + } + + if err := os.WriteFile(yamlFilePath, []byte(yamlContent), 0644); err != nil { + return fmt.Errorf("failed to write job.yaml file: %w", err) + } + + fmt.Printf("Created fine-tuning job configuration at: %s\n", yamlFilePath) + + // Set the template flag to the newly created YAML file + a.flags.template = yamlFilePath + } + fmt.Println() + color.Green("Initialized fine-tuning Project.") + + return nil +} + +func (a *InitAction) isGitHubUrl(manifestPointer string) bool { + // Check if it's a GitHub URL based on the patterns from downloadGithubManifest + parsedURL, err := url.Parse(manifestPointer) + if err != nil { + return false + } + hostname := parsedURL.Hostname() + + // Check for GitHub URL patterns as defined in downloadGithubManifest + return strings.HasPrefix(hostname, "raw.githubusercontent") || + strings.HasPrefix(hostname, "api.github") || + strings.Contains(hostname, "github") +} + +func 
downloadParentDirectory( + ctx context.Context, urlInfo *GitHubUrlInfo, targetDir string, ghCli *github.Cli, console input.Console) error { + + // Get parent directory by removing the filename from the file path + pathParts := strings.Split(urlInfo.FilePath, "/") + if len(pathParts) <= 1 { + fmt.Println("The file agent.yaml is at repository root, no parent directory to download") + return nil + } + + parentDirPath := strings.Join(pathParts[:len(pathParts)-1], "/") + fmt.Printf("Downloading parent directory '%s' from repository '%s', branch '%s'\n", parentDirPath, urlInfo.RepoSlug, urlInfo.Branch) + + // Download directory contents + if err := downloadDirectoryContents(ctx, urlInfo.Hostname, urlInfo.RepoSlug, parentDirPath, urlInfo.Branch, targetDir, ghCli, console); err != nil { + return fmt.Errorf("failed to download directory contents: %w", err) + } + + fmt.Printf("Successfully downloaded parent directory to: %s\n", targetDir) + return nil +} + +func downloadDirectoryContents( + ctx context.Context, hostname string, repoSlug string, dirPath string, branch string, localPath string, ghCli *github.Cli, console input.Console) error { + + // Get directory contents using GitHub API + apiPath := fmt.Sprintf("/repos/%s/contents/%s", repoSlug, dirPath) + if branch != "" { + apiPath += fmt.Sprintf("?ref=%s", branch) + } + + dirContentsJson, err := ghCli.ApiCall(ctx, hostname, apiPath, github.ApiCallOptions{}) + if err != nil { + return fmt.Errorf("failed to get directory contents: %w", err) + } + + // Parse the directory contents JSON + var dirContents []map[string]interface{} + if err := json.Unmarshal([]byte(dirContentsJson), &dirContents); err != nil { + return fmt.Errorf("failed to parse directory contents JSON: %w", err) + } + + // Download each file and subdirectory + for _, item := range dirContents { + name, ok := item["name"].(string) + if !ok { + continue + } + + itemType, ok := item["type"].(string) + if !ok { + continue + } + + itemPath := fmt.Sprintf("%s/%s", dirPath, name) + itemLocalPath := filepath.Join(localPath, name) + + if itemType == "file" { + // Download file + fmt.Printf("Downloading file: %s\n", itemPath) + fileApiPath := fmt.Sprintf("/repos/%s/contents/%s", repoSlug, itemPath) + if branch != "" { + fileApiPath += fmt.Sprintf("?ref=%s", branch) + } + + fileContent, err := ghCli.ApiCall(ctx, hostname, fileApiPath, github.ApiCallOptions{ + Headers: []string{"Accept: application/vnd.github.v3.raw"}, + }) + if err != nil { + return fmt.Errorf("failed to download file %s: %w", itemPath, err) + } + + if err := os.WriteFile(itemLocalPath, []byte(fileContent), 0644); err != nil { + return fmt.Errorf("failed to write file %s: %w", itemLocalPath, err) + } + } else if itemType == "dir" { + // Recursively download subdirectory + fmt.Printf("Downloading directory: %s\n", itemPath) + if err := os.MkdirAll(itemLocalPath, 0755); err != nil { + return fmt.Errorf("failed to create directory %s: %w", itemLocalPath, err) + } + + // Recursively download directory contents + if err := downloadDirectoryContents(ctx, hostname, repoSlug, itemPath, branch, itemLocalPath, ghCli, console); err != nil { + return fmt.Errorf("failed to download subdirectory %s: %w", itemPath, err) + } + } + } + + return nil +} + +// copyDirectory recursively copies all files and directories from src to dst +func copyDirectory(src, dst string) error { + return filepath.WalkDir(src, func(path string, d os.DirEntry, err error) error { + if err != nil { + return err + } + + // Calculate the destination path + relPath, err 
:= filepath.Rel(src, path) + if err != nil { + return err + } + dstPath := filepath.Join(dst, relPath) + + if d.IsDir() { + // Create directory and continue processing its contents + return os.MkdirAll(dstPath, 0755) + } else { + // Copy file + return copyFile(path, dstPath) + } + }) +} + +// copyFile copies a single file from src to dst +func copyFile(src, dst string) error { + // Create the destination directory if it doesn't exist + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + // Open source file + srcFile, err := os.Open(src) + if err != nil { + return err + } + defer srcFile.Close() + + // Create destination file + dstFile, err := os.Create(dst) + if err != nil { + return err + } + defer dstFile.Close() + + // Copy file contents + _, err = srcFile.WriteTo(dstFile) + return err +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go new file mode 100644 index 00000000000..d42e69587ce --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go @@ -0,0 +1,336 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package cmd + +import ( + "fmt" + "strings" + + "github.com/fatih/color" + "github.com/spf13/cobra" + + "github.com/azure/azure-dev/cli/azd/pkg/azdext" + "github.com/azure/azure-dev/cli/azd/pkg/ux" + + "azure.ai.finetune/internal/services" + "azure.ai.finetune/internal/utils" + "azure.ai.finetune/pkg/models" +) + +func newOperationCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "jobs", + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + return validateEnvironment(cmd.Context()) + }, + Short: "Manage fine-tuning jobs", + } + + cmd.AddCommand(newOperationSubmitCommand()) + cmd.AddCommand(newOperationShowCommand()) + cmd.AddCommand(newOperationListCommand()) + // cmd.AddCommand(newOperationActionCommand()) + // cmd.AddCommand(newOperationDeployModelCommand()) + + return cmd +} + +// formatFineTunedModel returns the model name or "NA" if blank +func formatFineTunedModel(model string) string { + if model == "" { + return "NA" + } + return model +} + +func newOperationSubmitCommand() *cobra.Command { + var filename string + var model string + var trainingFile string + var validationFile string + var suffix string + var seed int64 + cmd := &cobra.Command{ + Use: "submit", + Short: "submit fine tuning job", + RunE: func(cmd *cobra.Command, args []string) error { + ctx := azdext.WithAccessToken(cmd.Context()) + if filename == "" && (model == "" || trainingFile == "") { + return fmt.Errorf("either config file or model and training-file parameters are required") + } + + azdClient, err := azdext.NewAzdClient() + if err != nil { + return fmt.Errorf("failed to create azd client: %w", err) + } + defer azdClient.Close() + + // Show spinner while creating job + spinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "creating fine-tuning job...", + }) + if err := spinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + + // Parse and validate the YAML configuration file if provided + var config *models.CreateFineTuningRequest + if filename != "" { + color.Green("\nparsing configuration file...") + config, err = utils.ParseCreateFineTuningRequestConfig(filename) + if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() + return err + } + } else { + config = &models.CreateFineTuningRequest{} + } + + // Override config values with command-line 
parameters if provided + if model != "" { + config.BaseModel = model + } + if trainingFile != "" { + + config.TrainingFile = trainingFile + } + if validationFile != "" { + config.ValidationFile = &validationFile + } + if suffix != "" { + config.Suffix = &suffix + } + if seed != 0 { + config.Seed = &seed + } + + fineTuneSvc, err := services.NewFineTuningService(ctx, azdClient, nil) + if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() + return err + } + + // Submit the fine-tuning job using CreateJob from JobWrapper + job, err := fineTuneSvc.CreateFineTuningJob(ctx, config) + _ = spinner.Stop(ctx) + fmt.Println() + + if err != nil { + return err + } + + // Print success message + fmt.Println("\n", strings.Repeat("=", 60)) + color.Green("\nsuccessfully submitted fine-tuning Job!\n") + fmt.Printf("Job ID: %s\n", job.ID) + fmt.Printf("Model: %s\n", job.BaseModel) + fmt.Printf("Status: %s\n", job.Status) + fmt.Printf("Created: %s\n", job.CreatedAt) + if job.FineTunedModel != "" { + fmt.Printf("Fine-tuned: %s\n", job.FineTunedModel) + } + fmt.Println(strings.Repeat("=", 60)) + return nil + }, + } + + cmd.Flags().StringVarP(&filename, "file", "f", "", "Path to the config file.") + cmd.Flags().StringVarP(&model, "model", "m", "", "Base model to fine-tune. Overrides config file. Required if --file is not provided") + cmd.Flags().StringVarP(&trainingFile, "training-file", "t", "", "Training file ID or local path. Use 'local:' prefix for local paths. Required if --file is not provided") + cmd.Flags().StringVarP(&validationFile, "validation-file", "v", "", "Validation file ID or local path. Use 'local:' prefix for local paths.") + cmd.Flags().StringVarP(&suffix, "suffix", "s", "", "An optional string of up to 64 characters that will be added to your fine-tuned model name. Overrides config file.") + cmd.Flags().Int64VarP(&seed, "seed", "r", 0, "Random seed for reproducibility of the job. If a seed is not specified, one will be generated for you. 
Overrides config file.") + + //Either config file should be provided or at least `model` & `training-file` parameters + cmd.MarkFlagFilename("file", "yaml", "yml") + cmd.MarkFlagsOneRequired("file", "model") + cmd.MarkFlagsRequiredTogether("model", "training-file") + return cmd +} + +// newOperationShowCommand creates a command to show the fine-tuning job details +func newOperationShowCommand() *cobra.Command { + var jobID string + var logs bool + var output string + + cmd := &cobra.Command{ + Use: "show", + Short: "Shows detailed information about a specific job.", + RunE: func(cmd *cobra.Command, args []string) error { + ctx := azdext.WithAccessToken(cmd.Context()) + azdClient, err := azdext.NewAzdClient() + if err != nil { + return fmt.Errorf("failed to create azd client: %w", err) + } + defer azdClient.Close() + + // Show spinner while fetching job + spinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "Fine-Tuning Job Details", + }) + if err := spinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + + fineTuneSvc, err := services.NewFineTuningService(ctx, azdClient, nil) + if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() + return err + } + + job, err := fineTuneSvc.GetFineTuningJobDetails(ctx, jobID) + _ = spinner.Stop(ctx) + fmt.Print("\n\n") + if err != nil { + return err + } + + switch output { + case "json": + utils.PrintObject(job, utils.FormatJSON) + case "yaml": + utils.PrintObject(job, utils.FormatYAML) + default: + views := job.ToDetailViews() + utils.PrintObjectWithIndent(views.Details, utils.FormatTable, " ") + + fmt.Println("\nTimestamps:") + utils.PrintObjectWithIndent(views.Timestamps, utils.FormatTable, " ") + + fmt.Println("\nConfiguration:") + utils.PrintObjectWithIndent(views.Configuration, utils.FormatTable, " ") + + fmt.Println("\nData:") + utils.PrintObjectWithIndent(views.Data, utils.FormatTable, " ") + } + + if logs { + fmt.Println() + // Fetch and print events + eventsSpinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "Events:", + }) + if err := eventsSpinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + + events, err := fineTuneSvc.GetJobEvents(ctx, jobID) + _ = eventsSpinner.Stop(ctx) + fmt.Println() + + if err != nil { + return err + } else if events != nil && len(events.Data) > 0 { + for _, event := range events.Data { + fmt.Printf(" [%s] %s\n", utils.FormatTime(event.CreatedAt), event.Message) + } + if events.HasMore { + fmt.Println(" ... (more events available)") + } + } + } + // // Fetch and print checkpoints if job is completed + // if job.Status == models.StatusSucceeded { + // checkpointsSpinner := ux.NewSpinner(&ux.SpinnerOptions{ + // Text: "Fetching job checkpoints...", + // }) + // if err := checkpointsSpinner.Start(ctx); err != nil { + // fmt.Printf("failed to start spinner: %v\n", err) + // } + + // checkpoints, err := fineTuneSvc.GetJobCheckpoints(ctx, jobID) + // _ = checkpointsSpinner.Stop(ctx) + + // if err != nil { + // fmt.Println() + // return err + // } else if checkpoints != nil && len(checkpoints.Data) > 0 { + // fmt.Println("\nJob Checkpoints:") + // for i, checkpoint := range checkpoints.Data { + // fmt.Printf(" %d. 
Checkpoint ID: %s\n", i+1, checkpoint.ID) + // fmt.Printf(" Checkpoint Name: %s\n", checkpoint.FineTunedModelCheckpoint) + // fmt.Printf(" Created On: %s\n", utils.FormatTime(checkpoint.CreatedAt)) + // fmt.Printf(" Step Number: %d\n", checkpoint.StepNumber) + // if checkpoint.Metrics != nil { + // fmt.Printf(" Full Validation Loss: %.6f\n", checkpoint.Metrics.FullValidLoss) + // } + // } + // if checkpoints.HasMore { + // fmt.Println(" ... (more checkpoints available)") + // } + // } + // } + + // fmt.Println(strings.Repeat("=", 120)) + + return nil + }, + } + + cmd.Flags().StringVarP(&jobID, "id", "i", "", "Job ID") + cmd.Flags().BoolVar(&logs, "logs", false, "Include recent training logs") + cmd.Flags().StringVarP(&output, "output", "o", "table", "Output format: table, json, yaml") + cmd.MarkFlagRequired("id") + + return cmd +} + +// newOperationListCommand creates a command to list fine-tuning jobs +func newOperationListCommand() *cobra.Command { + var limit int + var after string + var output string + cmd := &cobra.Command{ + Use: "list", + Short: "List fine-tuning jobs.", + RunE: func(cmd *cobra.Command, args []string) error { + ctx := azdext.WithAccessToken(cmd.Context()) + azdClient, err := azdext.NewAzdClient() + if err != nil { + return fmt.Errorf("failed to create azd client: %w", err) + } + defer azdClient.Close() + + // Show spinner while fetching jobs + spinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "Fine-tuning Jobs", + }) + if err := spinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + + fineTuneSvc, err := services.NewFineTuningService(ctx, azdClient, nil) + if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() + return err + } + + jobs, err := fineTuneSvc.ListFineTuningJobs(ctx, limit, after) + _ = spinner.Stop(ctx) + fmt.Print("\n\n") + + if err != nil { + return err + } + + switch output { + case "json": + utils.PrintObject(jobs, utils.FormatJSON) + default: + utils.PrintObject(jobs, utils.FormatTable) + } + return nil + }, + } + + cmd.Flags().IntVarP(&limit, "top", "t", 10, "Number of jobs to return") + cmd.Flags().StringVar(&after, "after", "", "Pagination cursor") + cmd.Flags().StringVarP(&output, "output", "o", "table", "Output format: table, json") + return cmd +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/root.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/root.go new file mode 100644 index 00000000000..b111f07e8ea --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/root.go @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package cmd + +import ( + "github.com/spf13/cobra" +) + +type rootFlagsDefinition struct { + Debug bool + NoPrompt bool +} + +// Enable access to the global command flags +var rootFlags rootFlagsDefinition + +func NewRootCommand() *cobra.Command { + rootCmd := &cobra.Command{ + Use: "finetuning [options]", + Short: "Extension for Foundry Fine Tuning. (Preview)", + SilenceUsage: true, + SilenceErrors: true, + CompletionOptions: cobra.CompletionOptions{ + DisableDefaultCmd: true, + }, + } + + rootCmd.SetHelpCommand(&cobra.Command{Hidden: true}) + rootCmd.PersistentFlags().BoolVar( + &rootFlags.Debug, + "debug", + false, + "Enable debug mode", + ) + + // Adds support for `--no-prompt` global flag in azd. + // Without this the extension command will error when the flag is provided. 
+ rootCmd.PersistentFlags().BoolVar( + &rootFlags.NoPrompt, + "no-prompt", + false, + "accepts the default value instead of prompting, or fails if there is no default", + ) + + // rootCmd.AddCommand(newListenCommand()) + rootCmd.AddCommand(newVersionCommand()) + rootCmd.AddCommand(newInitCommand(rootFlags)) + rootCmd.AddCommand(newOperationCommand()) + + return rootCmd +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/validation.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/validation.go new file mode 100644 index 00000000000..ab9fdca036a --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/validation.go @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package cmd + +import ( + "context" + "fmt" + + "azure.ai.finetune/internal/utils" + "github.com/azure/azure-dev/cli/azd/pkg/azdext" +) + +func validateEnvironment(ctx context.Context) error { + ctx = azdext.WithAccessToken(ctx) + + azdClient, err := azdext.NewAzdClient() + if err != nil { + return err + } + defer azdClient.Close() + + envValues, _ := utils.GetEnvironmentValues(ctx, azdClient) + required := []string{utils.EnvAzureTenantID, utils.EnvAzureSubscriptionID, utils.EnvAzureLocation, utils.EnvAzureAccountName} + + for _, varName := range required { + if envValues[varName] == "" { + return fmt.Errorf("required environment variables not set. Please run 'azd ai finetune init' command to configure your environment") + } + } + return nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/version.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/version.go new file mode 100644 index 00000000000..715323a6c5c --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/version.go @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +var ( + // Populated at build time + Version = "dev" // Default value for development builds + Commit = "none" + BuildDate = "unknown" +) + +func newVersionCommand() *cobra.Command { + return &cobra.Command{ + Use: "version", + Short: "Prints the version of the application", + Run: func(cmd *cobra.Command, args []string) { + fmt.Printf("Version: %s\nCommit: %s\nBuild Date: %s\n", Version, Commit, BuildDate) + }, + } +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/azure/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/azure/provider.go new file mode 100644 index 00000000000..46fdd92e358 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/azure/provider.go @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
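As context for the validateEnvironment helper above: a minimal sketch of how such a check is typically attached to a cobra command via PreRunE. This wiring, and the command shown, are illustrative only and not part of this patch; it assumes code inside the same cmd package.

	cmd := &cobra.Command{
		Use:   "create",
		Short: "Create a fine-tuning job.",
		// Run the environment check before the command body executes.
		PreRunE: func(cmd *cobra.Command, args []string) error {
			return validateEnvironment(cmd.Context())
		},
		RunE: func(cmd *cobra.Command, args []string) error {
			// ... command implementation ...
			return nil
		},
	}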
+ +package azure + +import ( + "context" + + "azure.ai.finetune/pkg/models" + "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/cognitiveservices/armcognitiveservices" +) + +// AzureProvider implements the provider interface for Azure APIs +type AzureProvider struct { + clientFactory *armcognitiveservices.ClientFactory +} + +// NewAzureProvider creates a new Azure provider instance +func NewAzureProvider(clientFactory *armcognitiveservices.ClientFactory) *AzureProvider { + return &AzureProvider{ + clientFactory: clientFactory, + } +} + +// CreateFineTuningJob creates a new fine-tuning job via Azure OpenAI API +func (p *AzureProvider) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) { + // TODO: Implement + // 1. Convert domain model to Azure SDK format + // 2. Call Azure SDK CreateFineTuningJob + // 3. Convert Azure response to domain model + return nil, nil +} + +// GetFineTuningStatus retrieves the status of a fine-tuning job +func (p *AzureProvider) GetFineTuningStatus(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// ListFineTuningJobs lists all fine-tuning jobs +func (p *AzureProvider) ListFineTuningJobs(ctx context.Context, limit int, after string) ([]*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// GetFineTuningJobDetails retrieves detailed information about a job +func (p *AzureProvider) GetFineTuningJobDetails(ctx context.Context, jobID string) (*models.FineTuningJobDetail, error) { + // TODO: Implement + return nil, nil +} + +// GetJobEvents retrieves events for a fine-tuning job +func (p *AzureProvider) GetJobEvents(ctx context.Context, jobID string, limit int, after string) (*models.JobEventsList, error) { + // TODO: Implement + return nil, nil +} + +// GetJobCheckpoints retrieves checkpoints for a fine-tuning job +func (p *AzureProvider) GetJobCheckpoints(ctx context.Context, jobID string, limit int, after string) (*models.JobCheckpointsList, error) { + // TODO: Implement + return nil, nil +} + +// PauseJob pauses a fine-tuning job +func (p *AzureProvider) PauseJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// ResumeJob resumes a paused fine-tuning job +func (p *AzureProvider) ResumeJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// CancelJob cancels a fine-tuning job +func (p *AzureProvider) CancelJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// UploadFile uploads a file for fine-tuning +func (p *AzureProvider) UploadFile(ctx context.Context, filePath string) (string, error) { + // TODO: Implement + return "", nil +} + +// GetUploadedFile retrieves information about an uploaded file +func (p *AzureProvider) GetUploadedFile(ctx context.Context, fileID string) (interface{}, error) { + // TODO: Implement + return nil, nil +} + +// DeployModel deploys a fine-tuned or base model via Azure Cognitive Services +func (p *AzureProvider) DeployModel(ctx context.Context, req *models.DeploymentRequest) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// GetDeploymentStatus retrieves the status of a deployment +func (p *AzureProvider) GetDeploymentStatus(ctx context.Context, deploymentID string) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// ListDeployments lists all 
deployments +func (p *AzureProvider) ListDeployments(ctx context.Context, limit int, after string) ([]*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// UpdateDeployment updates deployment configuration +func (p *AzureProvider) UpdateDeployment(ctx context.Context, deploymentID string, capacity int32) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// DeleteDeployment deletes a deployment +func (p *AzureProvider) DeleteDeployment(ctx context.Context, deploymentID string) error { + // TODO: Implement + return nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/factory/provider_factory.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/factory/provider_factory.go new file mode 100644 index 00000000000..969bec2f43c --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/factory/provider_factory.go @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package factory + +import ( + "context" + "fmt" + "net/http" + + "azure.ai.finetune/internal/providers" + azureprovider "azure.ai.finetune/internal/providers/azure" + openaiprovider "azure.ai.finetune/internal/providers/openai" + "azure.ai.finetune/internal/utils" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime" + "github.com/Azure/azure-sdk-for-go/sdk/azidentity" + "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/cognitiveservices/armcognitiveservices" + "github.com/azure/azure-dev/cli/azd/pkg/azdext" + "github.com/openai/openai-go/v3" + "github.com/openai/openai-go/v3/option" +) + +const ( + // OpenAI API version for Azure cognitive services + DefaultApiVersion = "2025-11-15-preview" + // Azure cognitive services endpoint URL pattern + DefaultCognitiveServicesEndpoint = "https://%s.services.ai.azure.com/api/projects/%s" + DefaultAzureFinetuningScope = "https://ai.azure.com/.default" +) + +func GetOpenAIClientFromAzdClient(ctx context.Context, azdClient *azdext.AzdClient) (*openai.Client, error) { + envValueMap, err := utils.GetEnvironmentValues(ctx, azdClient) + if err != nil { + return nil, fmt.Errorf("failed to get environment values: %w", err) + } + + azureContext := &azdext.AzureContext{ + Scope: &azdext.AzureScope{ + TenantId: envValueMap[utils.EnvAzureTenantID], + SubscriptionId: envValueMap[utils.EnvAzureSubscriptionID], + Location: envValueMap[utils.EnvAzureLocation], + }, + Resources: []string{}, + } + + credential, err := azidentity.NewAzureDeveloperCLICredential(&azidentity.AzureDeveloperCLICredentialOptions{ + TenantID: azureContext.Scope.TenantId, + AdditionallyAllowedTenants: []string{"*"}, + }) + if err != nil { + return nil, fmt.Errorf("failed to create azure credential: %w", err) + } + + // Get Azure credentials and endpoint - TODO + // You'll need to get these from your environment or config + accountName := envValueMap[utils.EnvAzureAccountName] + projectName := envValueMap[utils.EnvAzureOpenAIProjectName] + endpoint := envValueMap[utils.EnvFinetuningRoute] + if endpoint == "" { + endpoint = fmt.Sprintf(DefaultCognitiveServicesEndpoint, accountName, projectName) + } + + apiVersion := envValueMap[utils.EnvAPIVersion] + if apiVersion == "" { + apiVersion = DefaultApiVersion + } + + scope := envValueMap[utils.EnvFinetuningTokenScope] + if scope == "" { + scope = DefaultAzureFinetuningScope + } + // Create OpenAI client + client := 
openai.NewClient( + //azure.WithEndpoint(endpoint, apiVersion), + option.WithBaseURL(endpoint), + option.WithQuery("api-version", apiVersion), + WithTokenCredential(credential, scope), + ) + return &client, nil +} + +// WithTokenCredential configures this client to authenticate using an [Azure Identity] TokenCredential. +// This function should be paired with a call to [WithEndpoint] to point to your Azure OpenAI instance. +// +// [Azure Identity]: https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity +func WithTokenCredential(tokenCredential azcore.TokenCredential, scope string) option.RequestOption { + bearerTokenPolicy := runtime.NewBearerTokenPolicy(tokenCredential, []string{scope}, nil) + // add in a middleware that uses the bearer token generated from the token credential + return option.WithMiddleware(func(req *http.Request, next option.MiddlewareNext) (*http.Response, error) { + pipeline := runtime.NewPipeline("finetune-extensions", version, runtime.PipelineOptions{}, &policy.ClientOptions{ + InsecureAllowCredentialWithHTTP: true, // allow for plain HTTP proxies, etc.. + PerRetryPolicies: []policy.Policy{ + bearerTokenPolicy, + policyAdapter(next), + }, + }) + + req2, err := runtime.NewRequestFromRequest(req) + + if err != nil { + return nil, err + } + + return pipeline.Do(req2) + }) +} + +// NewFineTuningProvider creates a FineTuningProvider based on provider type +func NewFineTuningProvider(ctx context.Context, azdClient *azdext.AzdClient) (providers.FineTuningProvider, error) { + client, err := GetOpenAIClientFromAzdClient(ctx, azdClient) + return openaiprovider.NewOpenAIProvider(client), err +} + +// NewModelDeploymentProvider creates a ModelDeploymentProvider based on provider type +func NewModelDeploymentProvider(subscriptionId string, credential azcore.TokenCredential) (providers.ModelDeploymentProvider, error) { + clientFactory, err := armcognitiveservices.NewClientFactory( + subscriptionId, + credential, + nil, + ) + if err != nil { + return nil, fmt.Errorf("failed to create armcognitiveservices client factory: %w", err) + } + return azureprovider.NewAzureProvider(clientFactory), err +} + +type policyAdapter option.MiddlewareNext + +func (mp policyAdapter) Do(req *policy.Request) (*http.Response, error) { + return (option.MiddlewareNext)(mp)(req.Raw()) +} + +const version = "v.0.1.0" diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/interface.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/interface.go new file mode 100644 index 00000000000..d2aeb2df163 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/interface.go @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package providers + +import ( + "context" + + "azure.ai.finetune/pkg/models" +) + +// FineTuningProvider defines the interface for fine-tuning operations +// All providers (OpenAI, Azure, Anthropic, etc.) 
must implement this interface +type FineTuningProvider interface { + // CreateFineTuningJob creates a new fine-tuning job + CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) + + // GetFineTuningStatus retrieves the status of a fine-tuning job + GetFineTuningStatus(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // ListFineTuningJobs lists all fine-tuning jobs + ListFineTuningJobs(ctx context.Context, limit int, after string) ([]*models.FineTuningJob, error) + + // GetFineTuningJobDetails retrieves detailed information about a job + GetFineTuningJobDetails(ctx context.Context, jobID string) (*models.FineTuningJobDetail, error) + + // GetJobEvents retrieves events for a fine-tuning job + GetJobEvents(ctx context.Context, jobID string) (*models.JobEventsList, error) + + // GetJobCheckpoints retrieves checkpoints for a fine-tuning job + GetJobCheckpoints(ctx context.Context, jobID string) (*models.JobCheckpointsList, error) + + // PauseJob pauses a fine-tuning job + PauseJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // ResumeJob resumes a paused fine-tuning job + ResumeJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // CancelJob cancels a fine-tuning job + CancelJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // UploadFile uploads a file for fine-tuning + UploadFile(ctx context.Context, filePath string) (string, error) + + // GetUploadedFile retrieves information about an uploaded file + GetUploadedFile(ctx context.Context, fileID string) (interface{}, error) +} + +// ModelDeploymentProvider defines the interface for model deployment operations +// All providers must implement this interface for deployment functionality +type ModelDeploymentProvider interface { + // DeployModel deploys a fine-tuned or base model + DeployModel(ctx context.Context, req *models.DeploymentRequest) (*models.Deployment, error) + + // GetDeploymentStatus retrieves the status of a deployment + GetDeploymentStatus(ctx context.Context, deploymentID string) (*models.Deployment, error) + + // ListDeployments lists all deployments + ListDeployments(ctx context.Context, limit int, after string) ([]*models.Deployment, error) + + // UpdateDeployment updates deployment configuration + UpdateDeployment(ctx context.Context, deploymentID string, capacity int32) (*models.Deployment, error) + + // DeleteDeployment deletes a deployment + DeleteDeployment(ctx context.Context, deploymentID string) error +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go new file mode 100644 index 00000000000..7a71d3ac3a0 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go @@ -0,0 +1,453 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
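The two provider interfaces above are implemented by the OpenAI and Azure providers in this patch. A compile-time guard in the same style the services use later in this change (illustrative; not itself part of the patch) would pin those implementations to the interfaces so signature drift fails the build:

	// Blank-identifier assertions: compilation fails if either concrete
	// provider stops satisfying its interface. Assumes the providers,
	// openaiprovider and azureprovider import aliases used by the factory.
	var (
		_ providers.FineTuningProvider      = (*openaiprovider.OpenAIProvider)(nil)
		_ providers.ModelDeploymentProvider = (*azureprovider.AzureProvider)(nil)
	)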
+ +package openai + +import ( + "encoding/json" + "strings" + "time" + + "github.com/openai/openai-go/v3" + "github.com/openai/openai-go/v3/packages/pagination" + + "azure.ai.finetune/internal/utils" + "azure.ai.finetune/pkg/models" +) + +// OpenAI Status Constants - matches OpenAI SDK values +const ( + OpenAIStatusValidatingFiles = "validating_files" + OpenAIStatusQueued = "queued" + OpenAIStatusRunning = "running" + OpenAIStatusSucceeded = "succeeded" + OpenAIStatusFailed = "failed" + OpenAIStatusCancelled = "cancelled" +) + +// mapOpenAIStatusToJobStatus converts OpenAI SDK status to domain model JobStatus +func mapOpenAIStatusToJobStatus(openaiStatus openai.FineTuningJobStatus) models.JobStatus { + switch openaiStatus { + case OpenAIStatusValidatingFiles, OpenAIStatusRunning: + return models.StatusRunning + case OpenAIStatusQueued: + return models.StatusQueued + case OpenAIStatusSucceeded: + return models.StatusSucceeded + case OpenAIStatusFailed: + return models.StatusFailed + case OpenAIStatusCancelled: + return models.StatusCancelled + default: + return models.StatusPending // Default fallback + } +} + +// convertOpenAIJobToModel converts OpenAI SDK job to domain model +func convertOpenAIJobToModel(openaiJob openai.FineTuningJob) *models.FineTuningJob { + return &models.FineTuningJob{ + ID: openaiJob.ID, + Status: mapOpenAIStatusToJobStatus(openaiJob.Status), + BaseModel: openaiJob.Model, + FineTunedModel: openaiJob.FineTunedModel, + CreatedAt: utils.UnixTimestampToUTC(openaiJob.CreatedAt), + Duration: models.Duration(calculateDuration(openaiJob.CreatedAt, openaiJob.FinishedAt)), + } +} + +func calculateDuration(createdAt, finishedAt int64) time.Duration { + if finishedAt > 0 { + return time.Duration(finishedAt-createdAt) * time.Second + } + return 0 +} + +// convertOpenAIJobToDetailModel converts OpenAI SDK job to detailed domain model +func convertOpenAIJobToDetailModel(openaiJob *openai.FineTuningJob) *models.FineTuningJobDetail { + // Extract hyperparameters from OpenAI job + hyperparameters := &models.Hyperparameters{} + if openaiJob.Method.Type == "supervised" { + hyperparameters.BatchSize = openaiJob.Method.Supervised.Hyperparameters.BatchSize.OfInt + hyperparameters.LearningRateMultiplier = openaiJob.Method.Supervised.Hyperparameters.LearningRateMultiplier.OfFloat + hyperparameters.NEpochs = openaiJob.Method.Supervised.Hyperparameters.NEpochs.OfInt + } else if openaiJob.Method.Type == "dpo" { + hyperparameters.BatchSize = openaiJob.Method.Dpo.Hyperparameters.BatchSize.OfInt + hyperparameters.LearningRateMultiplier = openaiJob.Method.Dpo.Hyperparameters.LearningRateMultiplier.OfFloat + hyperparameters.NEpochs = openaiJob.Method.Dpo.Hyperparameters.NEpochs.OfInt + hyperparameters.Beta = openaiJob.Method.Dpo.Hyperparameters.Beta.OfFloat + } else if openaiJob.Method.Type == "reinforcement" { + hyperparameters.BatchSize = openaiJob.Method.Reinforcement.Hyperparameters.BatchSize.OfInt + hyperparameters.LearningRateMultiplier = openaiJob.Method.Reinforcement.Hyperparameters.LearningRateMultiplier.OfFloat + hyperparameters.NEpochs = openaiJob.Method.Reinforcement.Hyperparameters.NEpochs.OfInt + hyperparameters.ComputeMultiplier = openaiJob.Method.Reinforcement.Hyperparameters.ComputeMultiplier.OfFloat + hyperparameters.EvalInterval = openaiJob.Method.Reinforcement.Hyperparameters.EvalInterval.OfInt + hyperparameters.EvalSamples = openaiJob.Method.Reinforcement.Hyperparameters.EvalSamples.OfInt + if openaiJob.Method.Reinforcement.Hyperparameters.ReasoningEffort != "" { + 
hyperparameters.ReasoningEffort = string(openaiJob.Method.Reinforcement.Hyperparameters.ReasoningEffort) + } + + } else { + // Fallback to top-level hyperparameters (for backward compatibility) + hyperparameters.BatchSize = openaiJob.Hyperparameters.BatchSize.OfInt + hyperparameters.LearningRateMultiplier = openaiJob.Hyperparameters.LearningRateMultiplier.OfFloat + hyperparameters.NEpochs = openaiJob.Hyperparameters.NEpochs.OfInt + } + + status := mapOpenAIStatusToJobStatus(openaiJob.Status) + + // Only set FinishedAt for terminal states + var finishedAt *time.Time + if utils.IsTerminalStatus(status) && openaiJob.FinishedAt > 0 { + t := utils.UnixTimestampToUTC(openaiJob.FinishedAt) + finishedAt = &t + } + + // Only set EstimatedFinish for non-terminal states + var estimatedFinish *time.Time + if !utils.IsTerminalStatus(status) && openaiJob.EstimatedFinish > 0 { + t := utils.UnixTimestampToUTC(openaiJob.EstimatedFinish) + estimatedFinish = &t + } + + jobDetail := &models.FineTuningJobDetail{ + ID: openaiJob.ID, + Status: status, + Model: openaiJob.Model, + FineTunedModel: openaiJob.FineTunedModel, + CreatedAt: utils.UnixTimestampToUTC(openaiJob.CreatedAt), + FinishedAt: finishedAt, + EstimatedFinish: estimatedFinish, + Method: openaiJob.Method.Type, + TrainingFile: openaiJob.TrainingFile, + ValidationFile: openaiJob.ValidationFile, + Hyperparameters: hyperparameters, + Seed: openaiJob.Seed, + } + + return jobDetail +} + +// convertOpenAIJobEventsToModel converts OpenAI SDK job events to domain model +func convertOpenAIJobEventsToModel(eventsPage *pagination.CursorPage[openai.FineTuningJobEvent]) *models.JobEventsList { + var events []models.JobEvent + for _, event := range eventsPage.Data { + jobEvent := models.JobEvent{ + ID: event.ID, + CreatedAt: utils.UnixTimestampToUTC(event.CreatedAt), + Level: string(event.Level), + Message: event.Message, + Data: event.Data, + Type: string(event.Type), + } + events = append(events, jobEvent) + } + + return &models.JobEventsList{ + Data: events, + HasMore: eventsPage.HasMore, + } +} + +// convertOpenAIJobCheckpointsToModel converts OpenAI SDK job checkpoints to domain model +func convertOpenAIJobCheckpointsToModel(checkpointsPage *pagination.CursorPage[openai.FineTuningJobCheckpoint]) *models.JobCheckpointsList { + var checkpoints []models.JobCheckpoint + + for _, checkpoint := range checkpointsPage.Data { + metrics := &models.CheckpointMetrics{ + FullValidLoss: checkpoint.Metrics.FullValidLoss, + FullValidMeanTokenAccuracy: checkpoint.Metrics.FullValidMeanTokenAccuracy, + } + + jobCheckpoint := models.JobCheckpoint{ + ID: checkpoint.ID, + CreatedAt: utils.UnixTimestampToUTC(checkpoint.CreatedAt), + FineTunedModelCheckpoint: checkpoint.FineTunedModelCheckpoint, + Metrics: metrics, + FineTuningJobID: checkpoint.FineTuningJobID, + StepNumber: checkpoint.StepNumber, + } + checkpoints = append(checkpoints, jobCheckpoint) + } + + return &models.JobCheckpointsList{ + Data: checkpoints, + HasMore: checkpointsPage.HasMore, + } +} + +// Converts the internal create finetuning request model to OpenAI job parameters +func convertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningRequest) (*openai.FineTuningJobNewParams, error) { + jobParams := openai.FineTuningJobNewParams{ + Model: openai.FineTuningJobNewParamsModel(config.BaseModel), + TrainingFile: config.TrainingFile, + } + + if config.ValidationFile != nil && *config.ValidationFile != "" { + jobParams.ValidationFile = openai.String(*config.ValidationFile) + } + + // Set optional fields + if 
config.Suffix != nil && *config.Suffix != "" { + jobParams.Suffix = openai.String(*config.Suffix) + } + + if config.Seed != nil { + jobParams.Seed = openai.Int(*config.Seed) + } + + // Set metadata if provided + if len(config.Metadata) > 0 { + jobParams.Metadata = make(map[string]string) + for k, v := range config.Metadata { + jobParams.Metadata[k] = v + } + } + + // Set hyperparameters if provided + if config.Method.Type == "supervised" && config.Method.Supervised != nil { + hp := config.Method.Supervised.Hyperparameters + supervisedMethod := openai.SupervisedMethodParam{ + Hyperparameters: openai.SupervisedHyperparameters{}, + } + + if hp.BatchSize != nil { + if batchSize := convertHyperparameterToInt(hp.BatchSize); batchSize != nil { + supervisedMethod.Hyperparameters.BatchSize = openai.SupervisedHyperparametersBatchSizeUnion{ + OfInt: openai.Int(*batchSize), + } + } + } + + if hp.LearningRateMultiplier != nil { + if lr := convertHyperparameterToFloat(hp.LearningRateMultiplier); lr != nil { + supervisedMethod.Hyperparameters.LearningRateMultiplier = openai.SupervisedHyperparametersLearningRateMultiplierUnion{ + OfFloat: openai.Float(*lr), + } + } + } + + if hp.Epochs != nil { + if epochs := convertHyperparameterToInt(hp.Epochs); epochs != nil { + supervisedMethod.Hyperparameters.NEpochs = openai.SupervisedHyperparametersNEpochsUnion{ + OfInt: openai.Int(*epochs), + } + } + } + + jobParams.Method = openai.FineTuningJobNewParamsMethod{ + Type: "supervised", + Supervised: supervisedMethod, + } + + } else if config.Method.Type == "dpo" && config.Method.DPO != nil { + hp := config.Method.DPO.Hyperparameters + dpoMethod := openai.DpoMethodParam{ + Hyperparameters: openai.DpoHyperparameters{}, + } + + if hp.BatchSize != nil { + if batchSize := convertHyperparameterToInt(hp.BatchSize); batchSize != nil { + dpoMethod.Hyperparameters.BatchSize = openai.DpoHyperparametersBatchSizeUnion{ + OfInt: openai.Int(*batchSize), + } + } + } + + if hp.LearningRateMultiplier != nil { + if lr := convertHyperparameterToFloat(hp.LearningRateMultiplier); lr != nil { + dpoMethod.Hyperparameters.LearningRateMultiplier = openai.DpoHyperparametersLearningRateMultiplierUnion{ + OfFloat: openai.Float(*lr), + } + } + } + + if hp.Epochs != nil { + if epochs := convertHyperparameterToInt(hp.Epochs); epochs != nil { + dpoMethod.Hyperparameters.NEpochs = openai.DpoHyperparametersNEpochsUnion{ + OfInt: openai.Int(*epochs), + } + } + } + + if hp.Beta != nil { + if beta := convertHyperparameterToFloat(hp.Beta); beta != nil { + dpoMethod.Hyperparameters.Beta = openai.DpoHyperparametersBetaUnion{ + OfFloat: openai.Float(*beta), + } + } + } + + jobParams.Method = openai.FineTuningJobNewParamsMethod{ + Type: "dpo", + Dpo: dpoMethod, + } + + } else if config.Method.Type == "reinforcement" && config.Method.Reinforcement != nil { + hp := config.Method.Reinforcement.Hyperparameters + reinforcementMethod := openai.ReinforcementMethodParam{ + Hyperparameters: openai.ReinforcementHyperparameters{}, + } + + if hp.BatchSize != nil { + if batchSize := convertHyperparameterToInt(hp.BatchSize); batchSize != nil { + reinforcementMethod.Hyperparameters.BatchSize = openai.ReinforcementHyperparametersBatchSizeUnion{ + OfInt: openai.Int(*batchSize), + } + } + } + + if hp.LearningRateMultiplier != nil { + if lr := convertHyperparameterToFloat(hp.LearningRateMultiplier); lr != nil { + reinforcementMethod.Hyperparameters.LearningRateMultiplier = openai.ReinforcementHyperparametersLearningRateMultiplierUnion{ + OfFloat: openai.Float(*lr), + } + } + } 
+ + if hp.Epochs != nil { + if epochs := convertHyperparameterToInt(hp.Epochs); epochs != nil { + reinforcementMethod.Hyperparameters.NEpochs = openai.ReinforcementHyperparametersNEpochsUnion{ + OfInt: openai.Int(*epochs), + } + } + } + + if hp.ComputeMultiplier != nil { + if compute := convertHyperparameterToFloat(hp.ComputeMultiplier); compute != nil { + reinforcementMethod.Hyperparameters.ComputeMultiplier = openai.ReinforcementHyperparametersComputeMultiplierUnion{ + OfFloat: openai.Float(*compute), + } + } + } + + if hp.EvalInterval != nil { + if evalSteps := convertHyperparameterToInt(hp.EvalInterval); evalSteps != nil { + reinforcementMethod.Hyperparameters.EvalInterval = openai.ReinforcementHyperparametersEvalIntervalUnion{ + OfInt: openai.Int(*evalSteps), + } + } + } + + if hp.EvalSamples != nil { + if evalSamples := convertHyperparameterToInt(hp.EvalSamples); evalSamples != nil { + reinforcementMethod.Hyperparameters.EvalSamples = openai.ReinforcementHyperparametersEvalSamplesUnion{ + OfInt: openai.Int(*evalSamples), + } + } + } + + if hp.ReasoningEffort != "" { + reinforcementMethod.Hyperparameters.ReasoningEffort = getReasoningEffortValue(hp.ReasoningEffort) + } + + grader := config.Method.Reinforcement.Grader + if grader != nil { + // Convert grader to JSON and unmarshal to ReinforcementMethodGraderUnionParam + graderJSON, err := json.Marshal(grader) + if err != nil { + return nil, err + } + + var graderUnion openai.ReinforcementMethodGraderUnionParam + err = json.Unmarshal(graderJSON, &graderUnion) + if err != nil { + return nil, err + } + reinforcementMethod.Grader = graderUnion + } + + jobParams.Method = openai.FineTuningJobNewParamsMethod{ + Type: "reinforcement", + Reinforcement: reinforcementMethod, + } + } + + // Set integrations if provided + if len(config.Integrations) > 0 { + var integrations []openai.FineTuningJobNewParamsIntegration + + for _, integration := range config.Integrations { + if integration.Type == "" || integration.Type == "wandb" { + + wandbConfigJSON, err := json.Marshal(integration.Config) + if err != nil { + return nil, err + } + + var wandbConfig openai.FineTuningJobNewParamsIntegrationWandb + err = json.Unmarshal(wandbConfigJSON, &wandbConfig) + if err != nil { + return nil, err + } + integrations = append(integrations, openai.FineTuningJobNewParamsIntegration{ + Type: "wandb", + Wandb: wandbConfig, + }) + } + } + + if len(integrations) > 0 { + jobParams.Integrations = integrations + } + } + + return &jobParams, nil +} + +// convertHyperparameterToInt converts interface{} hyperparameter to *int64 +func convertHyperparameterToInt(value interface{}) *int64 { + if value == nil { + return nil + } + switch v := value.(type) { + case int: + val := int64(v) + return &val + case int64: + return &v + case float64: + val := int64(v) + return &val + case string: + // "auto" string handled separately + return nil + default: + return nil + } +} + +// convertHyperparameterToFloat converts interface{} hyperparameter to *float64 +func convertHyperparameterToFloat(value interface{}) *float64 { + if value == nil { + return nil + } + switch v := value.(type) { + case int: + val := float64(v) + return &val + case int64: + val := float64(v) + return &val + case float64: + return &v + case string: + // "auto" string handled separately + return nil + default: + return nil + } +} + +func getReasoningEffortValue(effort string) openai.ReinforcementHyperparametersReasoningEffort { + + switch strings.ToLower(effort) { + case "low": + return 
openai.ReinforcementHyperparametersReasoningEffortLow + case "medium": + return openai.ReinforcementHyperparametersReasoningEffortMedium + case "high": + return openai.ReinforcementHyperparametersReasoningEffortHigh + default: + return openai.ReinforcementHyperparametersReasoningEffortDefault + } +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go new file mode 100644 index 00000000000..5ed9c1404a9 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go @@ -0,0 +1,225 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package openai + +import ( + "context" + "fmt" + "os" + "time" + + "azure.ai.finetune/pkg/models" + "github.com/azure/azure-dev/cli/azd/pkg/ux" + "github.com/fatih/color" + "github.com/openai/openai-go/v3" +) + +// OpenAIProvider implements the provider interface for OpenAI APIs +type OpenAIProvider struct { + client *openai.Client +} + +// NewOpenAIProvider creates a new OpenAI provider instance +func NewOpenAIProvider(client *openai.Client) *OpenAIProvider { + return &OpenAIProvider{ + client: client, + } +} + +// CreateFineTuningJob creates a new fine-tuning job via OpenAI API +func (p *OpenAIProvider) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) { + + params, err := convertInternalJobParamToOpenAiJobParams(req) + if err != nil { + return nil, fmt.Errorf("failed to convert internal model to openai: %w", err) + } + + job, err := p.client.FineTuning.Jobs.New(ctx, *params) + if err != nil { + return nil, fmt.Errorf("failed to create fine-tuning job: %w", err) + } + + return convertOpenAIJobToModel(*job), nil +} + +// GetFineTuningStatus retrieves the status of a fine-tuning job +func (p *OpenAIProvider) GetFineTuningStatus(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// ListFineTuningJobs lists all fine-tuning jobs +func (p *OpenAIProvider) ListFineTuningJobs(ctx context.Context, limit int, after string) ([]*models.FineTuningJob, error) { + jobList, err := p.client.FineTuning.Jobs.List(ctx, openai.FineTuningJobListParams{ + Limit: openai.Int(int64(limit)), // optional pagination control + After: openai.String(after), + }) + + if err != nil { + return nil, err + } + + var jobs []*models.FineTuningJob + + for _, job := range jobList.Data { + finetuningJob := convertOpenAIJobToModel(job) + jobs = append(jobs, finetuningJob) + } + + return jobs, nil +} + +// GetFineTuningJobDetails retrieves detailed information about a job +func (p *OpenAIProvider) GetFineTuningJobDetails(ctx context.Context, jobID string) (*models.FineTuningJobDetail, error) { + job, err := p.client.FineTuning.Jobs.Get(ctx, jobID) + if err != nil { + return nil, err + } + finetuningJobDetail := convertOpenAIJobToDetailModel(job) + + return finetuningJobDetail, nil +} + +// GetJobEvents retrieves events for a fine-tuning job +func (p *OpenAIProvider) GetJobEvents(ctx context.Context, jobID string) (*models.JobEventsList, error) { + eventsPage, err := p.client.FineTuning.Jobs.ListEvents( + ctx, + jobID, + openai.FineTuningJobListEventsParams{}, + ) + if err != nil { + return nil, err + } + + events := convertOpenAIJobEventsToModel(eventsPage) + + return events, nil +} + +// GetJobCheckpoints retrieves checkpoints for a fine-tuning job +func (p *OpenAIProvider) 
GetJobCheckpoints(ctx context.Context, jobID string) (*models.JobCheckpointsList, error) { + checkpointsPage, err := p.client.FineTuning.Jobs.Checkpoints.List( + ctx, + jobID, + openai.FineTuningJobCheckpointListParams{}, + ) + if err != nil { + return nil, err + } + checkpoints := convertOpenAIJobCheckpointsToModel(checkpointsPage) + + return checkpoints, nil +} + +// PauseJob pauses a fine-tuning job +func (p *OpenAIProvider) PauseJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// ResumeJob resumes a paused fine-tuning job +func (p *OpenAIProvider) ResumeJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// CancelJob cancels a fine-tuning job +func (p *OpenAIProvider) CancelJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// UploadFile uploads a file for fine-tuning +func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (string, error) { + if filePath == "" { + return "", fmt.Errorf("file path cannot be empty") + } + + // Show spinner while creating job + spinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "uploading the file for fine-tuning", + }) + if err := spinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + + file, err := os.Open(filePath) + if err != nil { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfailed to open file %s: %w", filePath, err) + } + defer file.Close() + + uploadedFile, err := p.client.Files.New(ctx, openai.FileNewParams{ + File: file, + Purpose: openai.FilePurposeFineTune, + }) + + if err != nil { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfailed to upload file: %w", err) + } + + if uploadedFile == nil || uploadedFile.ID == "" { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nuploaded file is empty") + } + + // Poll for file processing status + for { + f, err := p.client.Files.Get(ctx, uploadedFile.ID) + if err != nil { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfailed to check file status: %w", err) + } + if f.Status == openai.FileObjectStatusProcessed { + _ = spinner.Stop(ctx) + break + } + if f.Status == openai.FileObjectStatusError { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfile processing failed with status: %s", f.Status) + } + color.Yellow(".") + time.Sleep(2 * time.Second) + } + + return uploadedFile.ID, nil +} + +// GetUploadedFile retrieves information about an uploaded file +func (p *OpenAIProvider) GetUploadedFile(ctx context.Context, fileID string) (interface{}, error) { + // TODO: Implement + return nil, nil +} + +// DeployModel deploys a fine-tuned or base model +func (p *OpenAIProvider) DeployModel(ctx context.Context, req *models.DeploymentRequest) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// GetDeploymentStatus retrieves the status of a deployment +func (p *OpenAIProvider) GetDeploymentStatus(ctx context.Context, deploymentID string) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// ListDeployments lists all deployments +func (p *OpenAIProvider) ListDeployments(ctx context.Context, limit int, after string) ([]*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// UpdateDeployment updates deployment configuration +func (p *OpenAIProvider) UpdateDeployment(ctx context.Context, deploymentID string, capacity int32) (*models.Deployment, error) { + // TODO: Implement + return nil, 
nil +} + +// DeleteDeployment deletes a deployment +func (p *OpenAIProvider) DeleteDeployment(ctx context.Context, deploymentID string) error { + // TODO: Implement + return nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/deployment_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/deployment_service.go new file mode 100644 index 00000000000..aa5275df6f4 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/deployment_service.go @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package services + +import ( + "context" + + "azure.ai.finetune/internal/providers" + "azure.ai.finetune/pkg/models" +) + +// Ensure deploymentServiceImpl implements DeploymentService interface +var _ DeploymentService = (*deploymentServiceImpl)(nil) + +// deploymentServiceImpl implements the DeploymentService interface +type deploymentServiceImpl struct { + provider providers.ModelDeploymentProvider + stateStore StateStore +} + +// NewDeploymentService creates a new instance of DeploymentService +func NewDeploymentService(provider providers.ModelDeploymentProvider, stateStore StateStore) DeploymentService { + return &deploymentServiceImpl{ + provider: provider, + stateStore: stateStore, + } +} + +// DeployModel deploys a fine-tuned or base model with validation +func (s *deploymentServiceImpl) DeployModel(ctx context.Context, req *models.DeploymentRequest) (*models.Deployment, error) { + // TODO: Implement + // 1. Validate request (deployment name format, SKU valid, capacity valid, etc.) + // 2. Call provider.DeployModel() + // 3. Transform any errors to standardized ErrorDetail + // 4. Persist deployment to state store + // 5. Return deployment + return nil, nil +} + +// GetDeploymentStatus retrieves the current status of a deployment +func (s *deploymentServiceImpl) GetDeploymentStatus(ctx context.Context, deploymentID string) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// ListDeployments lists all deployments for the user +func (s *deploymentServiceImpl) ListDeployments(ctx context.Context, limit int, after string) ([]*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// UpdateDeployment updates deployment configuration (e.g., capacity) +func (s *deploymentServiceImpl) UpdateDeployment(ctx context.Context, deploymentID string, capacity int32) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} + +// DeleteDeployment deletes a deployment with proper validation +func (s *deploymentServiceImpl) DeleteDeployment(ctx context.Context, deploymentID string) error { + // TODO: Implement + return nil +} + +// WaitForDeployment waits for a deployment to become active +func (s *deploymentServiceImpl) WaitForDeployment(ctx context.Context, deploymentID string, timeoutSeconds int) (*models.Deployment, error) { + // TODO: Implement + return nil, nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go new file mode 100644 index 00000000000..b0e8e8d9690 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go @@ -0,0 +1,238 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
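Taken together, the OpenAI provider above covers upload and job creation end to end. A minimal usage sketch, assuming a ctx and an *openai.Client already built (for example via the factory shown earlier); the file path and base model name are illustrative, not values from this patch:

	provider := openaiprovider.NewOpenAIProvider(client)

	// Upload local training data; UploadFile blocks until the file is processed.
	fileID, err := provider.UploadFile(ctx, "./data/train.jsonl")
	if err != nil {
		return err
	}

	// Create the job against the uploaded file.
	job, err := provider.CreateFineTuningJob(ctx, &models.CreateFineTuningRequest{
		BaseModel:    "gpt-4o-mini", // illustrative model name
		TrainingFile: fileID,
	})
	if err != nil {
		return err
	}
	fmt.Println("created job:", job.ID)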
+ +package services + +import ( + "context" + "fmt" + "os" + + "azure.ai.finetune/internal/providers" + "azure.ai.finetune/internal/providers/factory" + "azure.ai.finetune/internal/utils" + "azure.ai.finetune/pkg/models" + "github.com/azure/azure-dev/cli/azd/pkg/azdext" + "github.com/fatih/color" +) + +// Ensure fineTuningServiceImpl implements FineTuningService interface +var _ FineTuningService = (*fineTuningServiceImpl)(nil) + +// fineTuningServiceImpl implements the FineTuningService interface +type fineTuningServiceImpl struct { + azdClient *azdext.AzdClient + provider providers.FineTuningProvider + stateStore StateStore +} + +// NewFineTuningService creates a new instance of FineTuningService +func NewFineTuningService(ctx context.Context, azdClient *azdext.AzdClient, stateStore StateStore) (FineTuningService, error) { + provider, err := factory.NewFineTuningProvider(ctx, azdClient) + if err != nil { + return nil, fmt.Errorf("failed to initialize fine-tuning service: %w", err) + } + + return &fineTuningServiceImpl{ + azdClient: azdClient, + provider: provider, + stateStore: stateStore, + }, nil +} + +// CreateFineTuningJob creates a new fine-tuning job with business validation +func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) { + // Validate request + if req == nil { + return nil, fmt.Errorf("request cannot be nil") + } + if req.BaseModel == "" { + return nil, fmt.Errorf("base model is required") + } + if req.TrainingFile == "" { + return nil, fmt.Errorf("training file is required") + } + + if utils.IsLocalFilePath(req.TrainingFile) { + color.Green("\nuploading training file...") + + trainingDataID, err := s.UploadFile(ctx, utils.GetLocalFilePath(req.TrainingFile)) + if err != nil { + return nil, fmt.Errorf("failed to upload training file: %w", err) + } + req.TrainingFile = trainingDataID + } else { + color.Yellow("\nProvided training file is non-local, skipping upload...") + } + + // Upload validation file if provided + if req.ValidationFile != nil && *req.ValidationFile != "" { + if utils.IsLocalFilePath(*req.ValidationFile) { + color.Green("\nuploading validation file...") + validationDataID, err := s.UploadFile(ctx, utils.GetLocalFilePath(*req.ValidationFile)) + if err != nil { + return nil, fmt.Errorf("failed to upload validation file: %w", err) + } + req.ValidationFile = &validationDataID + } else { + color.Yellow("\nProvided validation file is non-local, skipping upload...") + } + } + + // Call provider with retry logic + var job *models.FineTuningJob + err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + job, err = s.provider.CreateFineTuningJob(ctx, req) + return err + }) + if err != nil { + return nil, fmt.Errorf("failed to create fine-tuning job: %w", err) + } + + // Persist job to state store if available + if s.stateStore != nil { + if err := s.stateStore.SaveJob(ctx, job); err != nil { + return nil, fmt.Errorf("failed to persist job: %w", err) + } + } + + return job, nil +} + +// GetFineTuningStatus retrieves the current status of a job +func (s *fineTuningServiceImpl) GetFineTuningStatus(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// ListFineTuningJobs lists all fine-tuning jobs for the user +func (s *fineTuningServiceImpl) ListFineTuningJobs(ctx context.Context, limit int, after string) ([]*models.FineTuningJob, error) { + var jobs []*models.FineTuningJob + + // Use 
retry utility for list operation + err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + jobs, err = s.provider.ListFineTuningJobs(ctx, limit, after) + return err + }) + + if err != nil { + return nil, fmt.Errorf("failed to list fine-tuning jobs: %w", err) + } + + return jobs, nil +} + +// GetFineTuningJobDetails retrieves detailed information about a job +func (s *fineTuningServiceImpl) GetFineTuningJobDetails(ctx context.Context, jobID string) (*models.FineTuningJobDetail, error) { + var jobDetail *models.FineTuningJobDetail + + // Use retry utility for job detail operation + err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + jobDetail, err = s.provider.GetFineTuningJobDetails(ctx, jobID) + return err + }) + + if err != nil { + return nil, fmt.Errorf("failed to get job details: %w", err) + } + + return jobDetail, nil +} + +// GetJobEvents retrieves events for a job with filtering and pagination +func (s *fineTuningServiceImpl) GetJobEvents(ctx context.Context, jobID string) (*models.JobEventsList, error) { + var eventsList *models.JobEventsList + + // Use retry utility for job events operation + err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + eventsList, err = s.provider.GetJobEvents(ctx, jobID) + return err + }) + + if err != nil { + return nil, fmt.Errorf("failed to get job events: %w", err) + } + + return eventsList, nil +} + +// GetJobCheckpoints retrieves checkpoints for a job with pagination +func (s *fineTuningServiceImpl) GetJobCheckpoints(ctx context.Context, jobID string) (*models.JobCheckpointsList, error) { + var checkpointList *models.JobCheckpointsList + + // Use retry utility for job checkpoints operation + err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + checkpointList, err = s.provider.GetJobCheckpoints(ctx, jobID) + return err + }) + + if err != nil { + return nil, fmt.Errorf("failed to get job checkpoints: %w", err) + } + + return checkpointList, nil +} + +// PauseJob pauses a running job (if applicable) +func (s *fineTuningServiceImpl) PauseJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// ResumeJob resumes a paused job (if applicable) +func (s *fineTuningServiceImpl) ResumeJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// CancelJob cancels a job with proper state validation +func (s *fineTuningServiceImpl) CancelJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} + +// UploadFile uploads and validates a file +func (s *fineTuningServiceImpl) UploadFile(ctx context.Context, filePath string) (string, error) { + if filePath == "" { + return "", fmt.Errorf("file path cannot be empty") + } + uploadedFileId, err := s.uploadFile(ctx, filePath) + if err != nil || uploadedFileId == "" { + return "", fmt.Errorf("failed to upload file: %w", err) + } + return uploadedFileId, nil +} + +func (s *fineTuningServiceImpl) uploadFile(ctx context.Context, filePath string) (string, error) { + // validate file existence + fileInfo, err := os.Stat(filePath) + if err != nil { + if os.IsNotExist(err) { + return "", fmt.Errorf("file does not exist: %s", filePath) + } + return "", fmt.Errorf("failed to stat file %s: %w", filePath, err) + } + if fileInfo.IsDir() { + return "", fmt.Errorf("path is a directory, not a file: %s", 
filePath) + } + + // upload file with retry + uploadedFileId := "" + err = utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + uploadedFileId, err = s.provider.UploadFile(ctx, filePath) + return err + }) + return uploadedFileId, err +} + +// PollJobUntilCompletion polls a job until it completes or fails +func (s *fineTuningServiceImpl) PollJobUntilCompletion(ctx context.Context, jobID string, intervalSeconds int) (*models.FineTuningJob, error) { + // TODO: Implement + return nil, nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go b/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go new file mode 100644 index 00000000000..4bceba7daa2 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package services + +import ( + "context" + + "azure.ai.finetune/pkg/models" +) + +// FineTuningService defines the business logic interface for fine-tuning operations +type FineTuningService interface { + // CreateFineTuningJob creates a new fine-tuning job with business validation + CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) + + // GetFineTuningStatus retrieves the current status of a job + GetFineTuningStatus(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // ListFineTuningJobs lists all fine-tuning jobs for the user + ListFineTuningJobs(ctx context.Context, limit int, after string) ([]*models.FineTuningJob, error) + + // GetFineTuningJobDetails retrieves detailed information about a job + GetFineTuningJobDetails(ctx context.Context, jobID string) (*models.FineTuningJobDetail, error) + + // GetJobEvents retrieves events for a job with filtering and pagination + GetJobEvents(ctx context.Context, jobID string) (*models.JobEventsList, error) + + // GetJobCheckpoints retrieves checkpoints for a job with pagination + GetJobCheckpoints(ctx context.Context, jobID string) (*models.JobCheckpointsList, error) + + // PauseJob pauses a running job (if applicable) + PauseJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // ResumeJob resumes a paused job (if applicable) + ResumeJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // CancelJob cancels a job with proper state validation + CancelJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // UploadFile uploads and validates a file + UploadFile(ctx context.Context, filePath string) (string, error) + + // PollJobUntilCompletion polls a job until it completes or fails + PollJobUntilCompletion(ctx context.Context, jobID string, intervalSeconds int) (*models.FineTuningJob, error) +} + +// DeploymentService defines the business logic interface for model deployment operations +type DeploymentService interface { + // DeployModel deploys a fine-tuned or base model with validation + DeployModel(ctx context.Context, req *models.DeploymentRequest) (*models.Deployment, error) + + // GetDeploymentStatus retrieves the current status of a deployment + GetDeploymentStatus(ctx context.Context, deploymentID string) (*models.Deployment, error) + + // ListDeployments lists all deployments for the user + ListDeployments(ctx context.Context, limit int, after string) ([]*models.Deployment, error) + + // UpdateDeployment updates deployment configuration (e.g., capacity) + UpdateDeployment(ctx 
context.Context, deploymentID string, capacity int32) (*models.Deployment, error) + + // DeleteDeployment deletes a deployment with proper validation + DeleteDeployment(ctx context.Context, deploymentID string) error + + // WaitForDeployment waits for a deployment to become active + WaitForDeployment(ctx context.Context, deploymentID string, timeoutSeconds int) (*models.Deployment, error) +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/state_store.go b/cli/azd/extensions/azure.ai.finetune/internal/services/state_store.go new file mode 100644 index 00000000000..02b93103160 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/state_store.go @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package services + +import ( + "context" + + "azure.ai.finetune/pkg/models" +) + +// StateStore defines the interface for persisting job state +// This allows tracking jobs across CLI sessions +type StateStore interface { + // SaveJob persists a job to local storage + SaveJob(ctx context.Context, job *models.FineTuningJob) error + + // GetJob retrieves a job from local storage + GetJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) + + // ListJobs lists all locally tracked jobs + ListJobs(ctx context.Context) ([]*models.FineTuningJob, error) + + // UpdateJobStatus updates the status of a tracked job + UpdateJobStatus(ctx context.Context, jobID string, status models.JobStatus) error + + // DeleteJob removes a job from local storage + DeleteJob(ctx context.Context, jobID string) error + + // SaveDeployment persists a deployment to local storage + SaveDeployment(ctx context.Context, deployment *models.Deployment) error + + // GetDeployment retrieves a deployment from local storage + GetDeployment(ctx context.Context, deploymentID string) (*models.Deployment, error) + + // ListDeployments lists all locally tracked deployments + ListDeployments(ctx context.Context) ([]*models.Deployment, error) + + // UpdateDeploymentStatus updates the status of a tracked deployment + UpdateDeploymentStatus(ctx context.Context, deploymentID string, status models.DeploymentStatus) error + + // DeleteDeployment removes a deployment from local storage + DeleteDeployment(ctx context.Context, deploymentID string) error +} + +// ErrorTransformer defines the interface for transforming vendor-specific errors +// to standardized error details +type ErrorTransformer interface { + // TransformError converts a vendor-specific error to a standardized ErrorDetail + TransformError(vendorError error, vendorCode string) *models.ErrorDetail +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/common.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/common.go new file mode 100644 index 00000000000..016fb813d78 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/common.go @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+package utils + +import "strings" + +func IsLocalFilePath(fileID string) bool { + return strings.HasPrefix(fileID, "local:") +} + +func GetLocalFilePath(fileID string) string { + if IsLocalFilePath(fileID) { + return fileID[6:] + } + return fileID +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/environment.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/environment.go new file mode 100644 index 00000000000..75c27f47251 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/environment.go @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package utils + +import ( + "context" + "fmt" + + "github.com/azure/azure-dev/cli/azd/pkg/azdext" +) + +const ( + EnvAzureTenantID = "AZURE_TENANT_ID" + EnvAzureSubscriptionID = "AZURE_SUBSCRIPTION_ID" + EnvAzureLocation = "AZURE_LOCATION" + EnvAzureAccountName = "AZURE_ACCOUNT_NAME" + EnvAzureOpenAIProjectName = "AZURE_PROJECT_NAME" + EnvAPIVersion = "AZURE_API_VERSION" + EnvFinetuningRoute = "AZURE_FINETUNING_ROUTE" + EnvFinetuningTokenScope = "AZURE_FINETUNING_TOKEN_SCOPE" +) + +// GetEnvironmentValues retrieves Azure environment configuration from azd client. +// Returns empty map if environment cannot be accessed. +func GetEnvironmentValues(ctx context.Context, azdClient *azdext.AzdClient) (map[string]string, error) { + envValueMap := make(map[string]string) + + envResponse, err := azdClient.Environment().GetCurrent(ctx, &azdext.EmptyRequest{}) + if err != nil { + return envValueMap, fmt.Errorf("failed to get current environment: %w", err) + } + env := envResponse.Environment + + envValues, err := azdClient.Environment().GetValues(ctx, &azdext.GetEnvironmentRequest{ + Name: env.Name, + }) + if err != nil { + return envValueMap, fmt.Errorf("failed to get environment values: %w", err) + } + + for _, value := range envValues.KeyValues { + envValueMap[value.Key] = value.Value + } + + return envValueMap, nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/output.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/output.go new file mode 100644 index 00000000000..1a3d919aa14 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/output.go @@ -0,0 +1,284 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
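The helpers above define the extension's conventions for referencing training data and for reading azd environment values. A small sketch of the "local:" prefix handling (the paths and file ID are examples):

	// A "local:" prefix marks a path that must be uploaded first;
	// anything else is treated as an already-uploaded file ID.
	utils.IsLocalFilePath("local:./data/train.jsonl")  // true
	utils.GetLocalFilePath("local:./data/train.jsonl") // "./data/train.jsonl"
	utils.IsLocalFilePath("file-abc123")               // false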
+ +package utils + +import ( + "encoding/json" + "fmt" + "os" + "reflect" + "strings" + "text/tabwriter" + "time" + + "gopkg.in/yaml.v3" +) + +// OutputFormat represents the output format type +type OutputFormat string + +const ( + FormatTable OutputFormat = "table" + FormatJSON OutputFormat = "json" + FormatYAML OutputFormat = "yaml" +) + +// PrintObject prints a struct or slice in the specified format +func PrintObject(obj interface{}, format OutputFormat) error { + switch format { + case FormatJSON: + return printJSON(obj) + case FormatYAML: + return printYAML(obj) + case FormatTable: + return printTable(obj) + default: + return fmt.Errorf("unsupported format: %s", format) + } +} + +// printJSON uses encoding/json which respects `json` tags +func printJSON(obj interface{}) error { + data, err := json.MarshalIndent(obj, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal JSON: %w", err) + } + fmt.Println(string(data)) + return nil +} + +// printYAML uses gopkg.in/yaml.v3 which respects `yaml` tags +func printYAML(obj interface{}) error { + data, err := yaml.Marshal(obj) + if err != nil { + return fmt.Errorf("failed to marshal YAML: %w", err) + } + fmt.Print(string(data)) + return nil +} + +// printTable uses text/tabwriter and reads `table` tags +func printTable(obj interface{}) error { + v := reflect.ValueOf(obj) + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + + if v.Kind() == reflect.Slice { + return printSliceAsTable(v) + } + + if v.Kind() == reflect.Struct { + return printStructAsKeyValue(v) + } + + return fmt.Errorf("table format requires a struct or slice, got %s", v.Kind()) +} + +// columnInfo holds table column metadata +type columnInfo struct { + header string + index int +} + +// getTableColumns extracts fields with `table` tags +func getTableColumns(t reflect.Type) []columnInfo { + var cols []columnInfo + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + if !field.IsExported() { + continue + } + tag := field.Tag.Get("table") + if tag != "" && tag != "-" { + cols = append(cols, columnInfo{header: tag, index: i}) + } + } + return cols +} + +// printSliceAsTable prints a slice of structs as a table with headers and rows +func printSliceAsTable(v reflect.Value) error { + if v.Len() == 0 { + fmt.Println("No items to display") + return nil + } + + // Get element type + firstElem := v.Index(0) + if firstElem.Kind() == reflect.Ptr { + firstElem = firstElem.Elem() + } + + if firstElem.Kind() != reflect.Struct { + return fmt.Errorf("slice elements must be structs, got %s", firstElem.Kind()) + } + + cols := getTableColumns(firstElem.Type()) + if len(cols) == 0 { + return fmt.Errorf("no fields with table tags found") + } + + // Create tabwriter + w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) + + // Print header row + headers := make([]string, len(cols)) + for i, col := range cols { + headers[i] = col.header + } + fmt.Fprintln(w, strings.Join(headers, "\t")) + + // Print separator row + separators := make([]string, len(cols)) + for i, col := range cols { + separators[i] = strings.Repeat("-", len(col.header)) + } + fmt.Fprintln(w, strings.Join(separators, "\t")) + + // Print data rows + for i := 0; i < v.Len(); i++ { + elem := v.Index(i) + if elem.Kind() == reflect.Ptr { + elem = elem.Elem() + } + + values := make([]string, len(cols)) + for j, col := range cols { + values[j] = formatFieldValue(elem.Field(col.index)) + } + fmt.Fprintln(w, strings.Join(values, "\t")) + } + + return w.Flush() +} + +// printStructAsKeyValue prints a single struct as key-value 
pairs +func printStructAsKeyValue(v reflect.Value) error { + t := v.Type() + cols := getTableColumns(t) + + if len(cols) == 0 { + return fmt.Errorf("no fields with table tags found") + } + + w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) + + for _, col := range cols { + value := formatFieldValue(v.Field(col.index)) + fmt.Fprintf(w, "%s:\t%s\n", col.header, value) + } + + return w.Flush() +} + +// PrintObjectWithIndent prints a struct or slice in the specified format with indentation +func PrintObjectWithIndent(obj interface{}, format OutputFormat, indent string) error { + if format != FormatTable { + return PrintObject(obj, format) + } + + v := reflect.ValueOf(obj) + if v.Kind() == reflect.Ptr { + if v.IsNil() { + return nil + } + v = v.Elem() + } + + if v.Kind() != reflect.Struct { + return fmt.Errorf("expected struct, got %s", v.Kind()) + } + + t := v.Type() + cols := getTableColumns(t) + + if len(cols) == 0 { + return fmt.Errorf("no fields with table tags found") + } + + w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) + + for _, col := range cols { + value := formatFieldValue(v.Field(col.index)) + fmt.Fprintf(w, "%s%s:\t%s\n", indent, col.header, value) + } + + return w.Flush() +} + +// formatFieldValue converts a reflect.Value to a string representation +func formatFieldValue(v reflect.Value) string { + if !v.IsValid() { + return "-" + } + + // Handle pointers + if v.Kind() == reflect.Ptr { + if v.IsNil() { + return "-" + } + v = v.Elem() + } + + // Handle time.Time + if v.Type().String() == "time.Time" { + if t, ok := v.Interface().(interface{ Format(string) string }); ok { + return t.Format("2006-01-02 15:04") + } + } + + // Handle time.Duration + if v.Type().String() == "time.Duration" || v.Type().String() == "models.Duration" { + d := time.Duration(v.Int()) + if d == 0 { + return "-" + } + h := int(d.Hours()) + m := int(d.Minutes()) % 60 + return fmt.Sprintf("%dh %02dm", h, m) + } + + switch v.Kind() { + case reflect.String: + if v.String() == "" { + return "-" + } + return v.String() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return fmt.Sprintf("%d", v.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return fmt.Sprintf("%d", v.Uint()) + case reflect.Float32, reflect.Float64: + return fmt.Sprintf("%.4f", v.Float()) + case reflect.Bool: + return fmt.Sprintf("%t", v.Bool()) + default: + return fmt.Sprintf("%v", v.Interface()) + } +} + +// addSpaces inserts spaces before capital letters +func addSpaces(s string) string { + var result strings.Builder + for i, r := range s { + if i > 0 && r >= 'A' && r <= 'Z' { + result.WriteRune(' ') + } + result.WriteRune(r) + } + return result.String() +} + +// toTitleCase converts snake_case to Title Case +func toTitleCase(s string) string { + s = strings.ReplaceAll(s, "_", " ") + words := strings.Fields(s) + for i, word := range words { + if len(word) > 0 { + words[i] = strings.ToUpper(string(word[0])) + strings.ToLower(word[1:]) + } + } + return strings.Join(words, " ") +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go new file mode 100644 index 00000000000..c909b4d5545 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
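As a usage sketch for the table rendering in internal/utils/output.go above: only exported fields carrying a `table` tag become columns, and a "-" tag hides a field. The row type and values below are hypothetical and not part of this change.

package main

import "azure.ai.finetune/internal/utils"

// jobRow is a hypothetical row type; only the `table`-tagged fields are rendered.
type jobRow struct {
	ID     string `table:"ID"`
	Status string `table:"Status"`
	Secret string `table:"-"` // "-" excludes the field from table output
}

func main() {
	rows := []jobRow{
		{ID: "ftjob-1", Status: "running", Secret: "hidden"},
		{ID: "ftjob-2", Status: "succeeded", Secret: "hidden"},
	}

	// Prints a header row, a dashed separator, then one aligned line per element.
	// The same call with utils.FormatJSON or utils.FormatYAML switches the output format.
	_ = utils.PrintObject(rows, utils.FormatTable)
}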
+ +package utils + +import ( + "fmt" + "os" + + "azure.ai.finetune/pkg/models" + "github.com/braydonk/yaml" +) + +func ParseCreateFineTuningRequestConfig(filePath string) (*models.CreateFineTuningRequest, error) { + // Read the YAML file + yamlFile, err := os.ReadFile(filePath) + if err != nil { + return nil, fmt.Errorf("failed to read config file %s: %w", filePath, err) + } + + // Parse YAML into config struct + var config models.CreateFineTuningRequest + if err := yaml.Unmarshal(yamlFile, &config); err != nil { + return nil, fmt.Errorf("failed to parse YAML config: %w", err) + } + + // Validate the configuration + if err := config.Validate(); err != nil { + return nil, fmt.Errorf("invalid configuration: %w", err) + } + + return &config, nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/retry.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/retry.go new file mode 100644 index 00000000000..4f2c30b30f6 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/retry.go @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package utils + +import ( + "context" + "time" + + "github.com/sethvargo/go-retry" +) + +const ( + // DefaultMaxAttempts is the default number of retry attempts + DefaultMaxAttempts = 3 + // DefaultDelaySeconds is the default initial delay in seconds + DefaultDelaySeconds = 2 +) + +// DefaultRetryConfig returns a default exponential backoff strategy +func DefaultRetryConfig() retry.Backoff { + return retry.WithMaxRetries( + DefaultMaxAttempts-1, + retry.NewExponential(DefaultDelaySeconds*time.Second), + ) +} + +// RetryOperation executes the given operation with retry logic +// All errors returned by the operation are considered retryable +func RetryOperation(ctx context.Context, backoff retry.Backoff, operation func() error) error { + if backoff == nil { + backoff = DefaultRetryConfig() + } + + return retry.Do(ctx, backoff, func(ctx context.Context) error { + if err := operation(); err != nil { + return retry.RetryableError(err) + } + return nil + }) +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/status.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/status.go new file mode 100644 index 00000000000..fd10e1eae48 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/status.go @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package utils + +import "azure.ai.finetune/pkg/models" + +// getStatusSymbol returns a symbol representation for job status +func GetStatusSymbol(status models.JobStatus) string { + switch status { + case models.StatusPending: + return "⌛" + case models.StatusQueued: + return "📚" + case models.StatusRunning: + return "🔄" + case models.StatusSucceeded: + return "✅" + case models.StatusFailed: + return "💥" + case models.StatusCancelled: + return "❌" + default: + return "❓" + } +} + +func IsTerminalStatus(s models.JobStatus) bool { + return s == models.StatusSucceeded || s == models.StatusFailed || s == models.StatusCancelled +} \ No newline at end of file diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/time.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/time.go new file mode 100644 index 00000000000..52b9d378333 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/time.go @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
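A minimal sketch of how the retry helper in internal/utils/retry.go above can wrap a flaky call; the failing operation here is made up for illustration.

package main

import (
	"context"
	"fmt"

	"azure.ai.finetune/internal/utils"
)

func main() {
	ctx := context.Background()
	attempts := 0

	// Passing a nil backoff falls back to DefaultRetryConfig:
	// up to 3 attempts with exponential backoff starting at 2 seconds.
	err := utils.RetryOperation(ctx, nil, func() error {
		attempts++
		if attempts < 3 {
			return fmt.Errorf("transient failure on attempt %d", attempts)
		}
		return nil
	})

	fmt.Println(attempts, err) // 3 <nil>
}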
+ +package utils + +import ( + "time" +) + +// TimeFormat defines the standard time format used for display output +const TimeFormat = "2006-01-02 15:04:05 UTC" + +// UnixTimestampToUTC converts a Unix timestamp (seconds since epoch) to a UTC time.Time. +// Returns zero time.Time if timestamp is 0. +func UnixTimestampToUTC(timestamp int64) time.Time { + if timestamp == 0 { + return time.Time{} + } + return time.Unix(timestamp, 0).UTC() +} + +// FormatTime formats a time.Time to the standard display format. +// Returns empty string if time is zero. +func FormatTime(t time.Time) string { + if t.IsZero() { + return "" + } + return t.Format(TimeFormat) +} diff --git a/cli/azd/extensions/azure.ai.finetune/main.go b/cli/azd/extensions/azure.ai.finetune/main.go new file mode 100644 index 00000000000..70b4dee7748 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/main.go @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +package main + +import ( + "context" + "os" + + "azure.ai.finetune/internal/cmd" + "github.com/fatih/color" +) + +func init() { + forceColorVal, has := os.LookupEnv("FORCE_COLOR") + if has && forceColorVal == "1" { + color.NoColor = false + } +} + +func main() { + // Execute the root command + ctx := context.Background() + rootCmd := cmd.NewRootCommand() + + if err := rootCmd.ExecuteContext(ctx); err != nil { + color.Red("Error: %v", err) + os.Exit(1) + } +} diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/deployment.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/deployment.go new file mode 100644 index 00000000000..0a6257868da --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/deployment.go @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
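For the time helpers in internal/utils/time.go above, a short sketch of converting an API-style Unix timestamp for display; the timestamp value is arbitrary.

package main

import (
	"fmt"

	"azure.ai.finetune/internal/utils"
)

func main() {
	// A zero timestamp maps to the zero time, which FormatTime renders as "".
	fmt.Println(utils.FormatTime(utils.UnixTimestampToUTC(0)))

	// A non-zero Unix timestamp is converted to UTC and rendered with the
	// shared TimeFormat constant ("2006-01-02 15:04:05 UTC").
	t := utils.UnixTimestampToUTC(1700000000)
	fmt.Println(utils.FormatTime(t)) // 2023-11-14 22:13:20 UTC
}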
+ +package models + +import "time" + +// DeploymentStatus represents the status of a deployment +type DeploymentStatus string + +const ( + DeploymentPending DeploymentStatus = "pending" + DeploymentActive DeploymentStatus = "active" + DeploymentUpdating DeploymentStatus = "updating" + DeploymentFailed DeploymentStatus = "failed" + DeploymentDeleting DeploymentStatus = "deleting" +) + +// Deployment represents a model deployment +type Deployment struct { + // Core identification + ID string + VendorID string // Vendor-specific ID + + // Deployment details + Name string + Status DeploymentStatus + FineTunedModel string + BaseModel string + + // Endpoint + Endpoint string + + // Timestamps + CreatedAt time.Time + UpdatedAt *time.Time + DeletedAt *time.Time + + // Metadata + VendorMetadata map[string]interface{} + ErrorDetails *ErrorDetail +} + +// DeploymentRequest represents a request to create a deployment +type DeploymentRequest struct { + DeploymentName string + ModelID string + ModelFormat string + SKU string + Version string + Capacity int32 + SubscriptionID string + ResourceGroup string + AccountName string + TenantID string + WaitForCompletion bool +} + +// DeploymentConfig contains configuration for deploying a model +type DeploymentConfig struct { + JobID string + DeploymentName string + ModelFormat string + SKU string + Version string + Capacity int32 + SubscriptionID string + ResourceGroup string + AccountName string + TenantID string + WaitForCompletion bool +} + +// BaseModel represents information about a base model +type BaseModel struct { + ID string + Name string + Description string + Deprecated bool +} diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/errors.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/errors.go new file mode 100644 index 00000000000..98fd25db404 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/errors.go @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +package models + +// ErrorDetail represents a standardized error response across vendors +type ErrorDetail struct { + Code string // Standard error code (e.g., "INVALID_REQUEST", "RATE_LIMITED") + Message string // User-friendly error message + Retryable bool // Whether the operation can be retried + VendorError error // Original vendor-specific error (for debugging) + VendorCode string // Vendor-specific error code +} + +// Common error codes +const ( + ErrorCodeInvalidRequest = "INVALID_REQUEST" + ErrorCodeNotFound = "NOT_FOUND" + ErrorCodeUnauthorized = "UNAUTHORIZED" + ErrorCodeForbidden = "FORBIDDEN" + ErrorCodeRateLimited = "RATE_LIMITED" + ErrorCodeServiceUnavailable = "SERVICE_UNAVAILABLE" + ErrorCodeInternalError = "INTERNAL_ERROR" + ErrorCodeInvalidModel = "INVALID_MODEL" + ErrorCodeInvalidFileSize = "INVALID_FILE_SIZE" + ErrorCodeOperationFailed = "OPERATION_FAILED" +) + +// Error implements the error interface +func (e *ErrorDetail) Error() string { + return e.Message +} diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go new file mode 100644 index 00000000000..1ec3e7e4c2a --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go @@ -0,0 +1,339 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
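An illustrative sketch of how the ErrorDetail type in pkg/models/errors.go above might wrap a vendor failure; the wrapped error and the way it is produced are hypothetical.

package main

import (
	"errors"
	"fmt"

	"azure.ai.finetune/pkg/models"
)

func main() {
	// Hypothetical vendor error returned by an underlying SDK call.
	vendorErr := errors.New("429: too many requests")

	// ErrorDetail implements error, so it can be returned up the stack
	// while preserving the vendor-specific code for debugging.
	var err error = &models.ErrorDetail{
		Code:        models.ErrorCodeRateLimited,
		Message:     "the service is throttling requests, try again later",
		Retryable:   true,
		VendorError: vendorErr,
		VendorCode:  "429",
	}

	var detail *models.ErrorDetail
	if errors.As(err, &detail) && detail.Retryable {
		fmt.Println("retryable:", detail.Code, "-", detail.Message)
	}
}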
+ +package models + +import ( + "fmt" + "time" +) + +// JobStatus represents the status of a fine-tuning job +type JobStatus string + +const ( + StatusPending JobStatus = "pending" + StatusQueued JobStatus = "queued" + StatusRunning JobStatus = "running" + StatusSucceeded JobStatus = "succeeded" + StatusFailed JobStatus = "failed" + StatusCancelled JobStatus = "cancelled" + StatusPaused JobStatus = "paused" +) + +// Represents the type of method used for fine-tuning +type MethodType string + +const ( + Supervised MethodType = "supervised" + DPO MethodType = "dpo" + Reinforcement MethodType = "reinforcement" +) + +type Duration time.Duration + +func (d Duration) MarshalJSON() ([]byte, error) { + if d == 0 { + return []byte(`"-"`), nil + } + h := int(time.Duration(d).Hours()) + m := int(time.Duration(d).Minutes()) % 60 + return []byte(fmt.Sprintf(`"%dh %02dm"`, h, m)), nil +} + +func (d Duration) MarshalYAML() (interface{}, error) { + if d == 0 { + return "-", nil + } + h := int(time.Duration(d).Hours()) + m := int(time.Duration(d).Minutes()) % 60 + return fmt.Sprintf("%dh %02dm", h, m), nil +} + +// FineTuningJob represents a vendor-agnostic fine-tuning job +type FineTuningJob struct { + // Core identification + ID string `json:"id" table:"ID"` + VendorJobID string `json:"-" table:"-"` // Vendor-specific ID (e.g., OpenAI's ftjob-xxx) + + // Job details + BaseModel string `json:"model" table:"MODEL"` + Status JobStatus `json:"status" table:"STATUS"` + FineTunedModel string `json:"-" table:"-"` + + // Timestamps + CreatedAt time.Time `json:"created_at" table:"CREATED"` + Duration Duration `json:"duration" table:"DURATION"` + CompletedAt *time.Time `json:"-" table:"-"` + + // Files + TrainingFileID string `json:"-" table:"-"` + ValidationFileID string `json:"-" table:"-"` + + // Metadata + VendorMetadata map[string]interface{} `json:"-" table:"-"` // Store vendor-specific details + ErrorDetails *ErrorDetail `json:"-" table:"-"` +} + +// Hyperparameters represents fine-tuning hyperparameters +type Hyperparameters struct { + BatchSize int64 `json:"batch_size" yaml:"batch_size"` + LearningRateMultiplier float64 `json:"learning_rate_multiplier" yaml:"learning_rate_multiplier"` + NEpochs int64 `json:"n_epochs" yaml:"n_epochs"` + Beta float64 `json:"beta,omitempty" yaml:"beta,omitempty"` // For DPO + ComputeMultiplier float64 `json:"compute_multiplier,omitempty" yaml:"compute_multiplier,omitempty"` // For Reinforcement + EvalInterval int64 `json:"eval_interval,omitempty" yaml:"eval_interval,omitempty"` // For Reinforcement + EvalSamples int64 `json:"eval_samples,omitempty" yaml:"eval_samples,omitempty"` // For Reinforcement + ReasoningEffort string `json:"reasoning_effort,omitempty" yaml:"reasoning_effort,omitempty"` // For Reinforcement +} + +// ListFineTuningJobsRequest represents a request to list fine-tuning jobs +type ListFineTuningJobsRequest struct { + Limit int + After string +} + +// FineTuningJobDetail represents detailed information about a fine-tuning job +type FineTuningJobDetail struct { + ID string `json:"id" yaml:"id"` + Status JobStatus `json:"status" yaml:"status"` + Model string `json:"model" yaml:"model"` + FineTunedModel string `json:"fine_tuned_model" yaml:"fine_tuned_model"` + CreatedAt time.Time `json:"created_at" yaml:"created_at"` + FinishedAt *time.Time `json:"finished_at,omitempty" yaml:"finished_at,omitempty"` + EstimatedFinish *time.Time `json:"estimated_finish,omitempty" yaml:"estimated_finish,omitempty"` + Method string `json:"training_type" yaml:"training_type"` + 
TrainingFile string `json:"training_file" yaml:"training_file"` + ValidationFile string `json:"validation_file,omitempty" yaml:"validation_file,omitempty"` + Hyperparameters *Hyperparameters `json:"hyperparameters" yaml:"hyperparameters"` + VendorMetadata map[string]interface{} `json:"-" yaml:"-"` + Seed int64 `json:"-" yaml:"-"` +} + +// JobEvent represents an event associated with a fine-tuning job +type JobEvent struct { + ID string + CreatedAt time.Time + Level string + Message string + Data interface{} + Type string +} + +// JobEventsList represents a paginated list of job events +type JobEventsList struct { + Data []JobEvent + HasMore bool +} + +// JobCheckpoint represents a checkpoint of a fine-tuning job +type JobCheckpoint struct { + ID string + CreatedAt time.Time + FineTunedModelCheckpoint string + Metrics *CheckpointMetrics + FineTuningJobID string + StepNumber int64 +} + +// JobCheckpointsList represents a list of job checkpoints +type JobCheckpointsList struct { + Data []JobCheckpoint + HasMore bool +} + +// CheckpointMetrics represents metrics for a checkpoint +type CheckpointMetrics struct { + FullValidLoss float64 + FullValidMeanTokenAccuracy float64 +} + +// CreateFineTuningRequest represents a request to create a fine-tuning job +type CreateFineTuningRequest struct { + // Required: The name of the model to fine-tune + BaseModel string `yaml:"model"` + + // Required: Path to training file + // Format: "file-id" or "local:/path/to/file.jsonl" + TrainingFile string `yaml:"training_file"` + + // Optional: Path to validation file + ValidationFile *string `yaml:"validation_file,omitempty"` + + // Optional: Suffix for the fine-tuned model name (up to 64 characters) + // Example: "custom-model-name" produces "ft:gpt-4o-mini:openai:custom-model-name:7p4lURel" + Suffix *string `yaml:"suffix,omitempty"` + + // Optional: Random seed for reproducibility + Seed *int64 `yaml:"seed,omitempty"` + + // Optional: Custom metadata for the fine-tuning job + // Max 16 key-value pairs, keys max 64 chars, values max 512 chars + Metadata map[string]string `yaml:"metadata,omitempty"` + + // Optional: Fine-tuning method configuration (supervised, dpo, or reinforcement) + Method MethodConfig `yaml:"method,omitempty"` + + // Optional: Integrations to enable (e.g., wandb for Weights & Biases) + Integrations []Integration `yaml:"integrations,omitempty"` + + // Optional: Additional request body fields not covered by standard config + ExtraBody map[string]interface{} `yaml:"extra_body,omitempty"` +} + +// MethodConfig represents fine-tuning method configuration +type MethodConfig struct { + // Type of fine-tuning method: "supervised", "dpo", or "reinforcement" + Type string `yaml:"type"` + + // Supervised fine-tuning configuration + Supervised *SupervisedConfig `yaml:"supervised,omitempty"` + + // Direct Preference Optimization (DPO) configuration + DPO *DPOConfig `yaml:"dpo,omitempty"` + + // Reinforcement learning fine-tuning configuration + Reinforcement *ReinforcementConfig `yaml:"reinforcement,omitempty"` +} + +// SupervisedConfig represents supervised fine-tuning method configuration +// Suitable for standard supervised learning tasks +type SupervisedConfig struct { + Hyperparameters HyperparametersConfig `yaml:"hyperparameters,omitempty"` +} + +// DPOConfig represents Direct Preference Optimization (DPO) configuration +// DPO is used for preference-based fine-tuning +type DPOConfig struct { + Hyperparameters HyperparametersConfig `yaml:"hyperparameters,omitempty"` +} + +// ReinforcementConfig 
represents reinforcement learning fine-tuning configuration +// Suitable for reasoning models that benefit from reinforcement learning +type ReinforcementConfig struct { + // Grader configuration for reinforcement learning (evaluates model outputs) + Grader map[string]interface{} `yaml:"grader,omitempty"` + + // Hyperparameters specific to reinforcement learning + Hyperparameters HyperparametersConfig `yaml:"hyperparameters,omitempty"` +} + +// HyperparametersConfig represents hyperparameter configuration +// Values can be integers, floats, or "auto" for automatic configuration +type HyperparametersConfig struct { + // Number of training epochs + // Can be: integer (1-10), "auto" + Epochs interface{} `yaml:"epochs,omitempty"` + + // Batch size for training + // Can be: integer (1, 8, 16, 32, 64, 128), "auto" + BatchSize interface{} `yaml:"batch_size,omitempty"` + + // Learning rate multiplier + // Can be: float (0.1-2.0), "auto" + LearningRateMultiplier interface{} `yaml:"learning_rate_multiplier,omitempty"` + + // Weight for prompt loss in supervised learning (0.0-1.0) + PromptLossWeight *float64 `yaml:"prompt_loss_weight,omitempty"` + + // Beta parameter for DPO (temperature-like parameter) + // Can be: float, "auto" + Beta interface{} `yaml:"beta,omitempty"` + + // Compute multiplier for reinforcement learning + // Multiplier on amount of compute used for exploring search space during training + // Can be: float, "auto" + ComputeMultiplier interface{} `yaml:"compute_multiplier,omitempty"` + + // Reasoning effort level for reinforcement learning with reasoning models + // Options: "low", "medium", "high" + ReasoningEffort string `yaml:"reasoning_effort,omitempty"` + + // Evaluation interval for reinforcement learning + // Number of training steps between evaluation runs + // Can be: integer, "auto" + EvalInterval interface{} `yaml:"eval_interval,omitempty"` + + // Evaluation samples for reinforcement learning + // Number of evaluation samples to generate per training step + // Can be: integer, "auto" + EvalSamples interface{} `yaml:"eval_samples,omitempty"` +} + +// Integration represents integration configuration (e.g., Weights & Biases) +type Integration struct { + // Type of integration: "wandb" (Weights & Biases), etc. + Type string `yaml:"type"` + + // Integration-specific configuration (API keys, project names, etc.) 
+ Config map[string]interface{} `yaml:"config,omitempty"` +} + +// Validate checks if the configuration is valid +func (c CreateFineTuningRequest) Validate() error { + // Validate required fields + if c.BaseModel == "" { + return fmt.Errorf("model is required") + } + + if c.TrainingFile == "" { + return fmt.Errorf("training_file is required") + } + + // Validate method if provided + if c.Method.Type != "" { + if c.Method.Type != string(Supervised) && c.Method.Type != string(DPO) && c.Method.Type != string(Reinforcement) { + return fmt.Errorf("invalid method type: %s (must be 'supervised', 'dpo', or 'reinforcement')", c.Method.Type) + } + + // Validate method-specific configuration + switch c.Method.Type { + case string(Supervised): + if c.Method.Supervised == nil { + return fmt.Errorf("supervised method requires 'supervised' configuration block") + } + case string(DPO): + if c.Method.DPO == nil { + return fmt.Errorf("dpo method requires 'dpo' configuration block") + } + case string(Reinforcement): + if c.Method.Reinforcement == nil { + return fmt.Errorf("reinforcement method requires 'reinforcement' configuration block") + } + } + } + + // Validate integrations if provided + if len(c.Integrations) > 0 { + for _, integration := range c.Integrations { + if integration.Type == "" { + return fmt.Errorf("integration type is required if integrations are specified") + } + if integration.Config == nil { + return fmt.Errorf("integration of type '%s' requires 'config' block", integration.Type) + } + } + } + + // Validate suffix length if provided + if c.Suffix != nil && len(*c.Suffix) > 64 { + return fmt.Errorf("suffix exceeds maximum length of 64 characters: %d", len(*c.Suffix)) + } + + // Validate metadata constraints + if c.Metadata != nil { + if len(c.Metadata) > 16 { + return fmt.Errorf("metadata exceeds maximum of 16 key-value pairs: %d", len(c.Metadata)) + } + for k, v := range c.Metadata { + if len(k) > 64 { + return fmt.Errorf("metadata key exceeds maximum length of 64 characters: %s", k) + } + if len(v) > 512 { + return fmt.Errorf("metadata value exceeds maximum length of 512 characters for key: %s", k) + } + } + } + + return nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/requests.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/requests.go new file mode 100644 index 00000000000..3c42e3c146d --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/requests.go @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
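A small sketch of building a CreateFineTuningRequest in code that passes the Validate rules above; the model name, file ID, and suffix are placeholders.

package main

import (
	"fmt"

	"azure.ai.finetune/pkg/models"
)

func main() {
	suffix := "my-experiment" // placeholder suffix, must be at most 64 characters
	req := models.CreateFineTuningRequest{
		BaseModel:    "gpt-4o-mini", // placeholder model name
		TrainingFile: "file-abc123", // placeholder training file ID
		Suffix:       &suffix,
		Method: models.MethodConfig{
			Type: string(models.Supervised),
			Supervised: &models.SupervisedConfig{
				Hyperparameters: models.HyperparametersConfig{
					Epochs:    3,
					BatchSize: "auto",
				},
			},
		},
	}

	// Validate enforces the required fields plus the method, suffix,
	// metadata, and integration constraints documented above.
	if err := req.Validate(); err != nil {
		fmt.Println("invalid request:", err)
		return
	}
	fmt.Println("request is valid")
}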
+ +package models + +// PauseJobRequest represents a request to pause a fine-tuning job +type PauseJobRequest struct { + JobID string +} + +// ResumeJobRequest represents a request to resume a fine-tuning job +type ResumeJobRequest struct { + JobID string +} + +// CancelJobRequest represents a request to cancel a fine-tuning job +type CancelJobRequest struct { + JobID string +} + +// GetJobDetailsRequest represents a request to get job details +type GetJobDetailsRequest struct { + JobID string +} + +// GetJobEventsRequest represents a request to list job events +type GetJobEventsRequest struct { + JobID string + Limit int + After string +} + +// GetJobCheckpointsRequest represents a request to list job checkpoints +type GetJobCheckpointsRequest struct { + JobID string + Limit int + After string +} + +// ListDeploymentsRequest represents a request to list deployments +type ListDeploymentsRequest struct { + Limit int + After string +} + +// GetDeploymentRequest represents a request to get deployment details +type GetDeploymentRequest struct { + DeploymentID string +} + +// DeleteDeploymentRequest represents a request to delete a deployment +type DeleteDeploymentRequest struct { + DeploymentID string +} + +// UpdateDeploymentRequest represents a request to update a deployment +type UpdateDeploymentRequest struct { + DeploymentID string + Capacity int32 +} diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/views.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/views.go new file mode 100644 index 00000000000..e04a2baee39 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/views.go @@ -0,0 +1,186 @@ +package models + +import ( + "fmt" + "time" +) + +// FineTuningJobTableView is the table display representation for job listings +type FineTuningJobTableView struct { + ID string `table:"ID"` + Status JobStatus `table:"Status"` + BaseModel string `table:"Model"` + CreatedAt time.Time `table:"Created"` +} + +// JobDetailsView is the basic job info section +type JobDetailsView struct { + ID string `table:"ID"` + Status JobStatus `table:"Status"` + Model string `table:"Model"` + FineTunedModel string `table:"Fine-tuned Model"` +} + +// TimestampsView is the timestamps section +type TimestampsView struct { + Created string `table:"Created"` + Finished string `table:"Finished"` + EstimatedETA string `table:"Estimated ETA"` +} + +// BaseConfigurationView has fields common to all methods +type BaseConfigurationView struct { + TrainingType string `table:"Training Type"` + Epochs int64 `table:"Epochs"` + BatchSize int64 `table:"Batch Size"` + LearningRate string `table:"Learning Rate"` +} + +// DPOConfigurationView has DPO-specific fields +type DPOConfigurationView struct { + TrainingType string `table:"Training Type"` + Epochs int64 `table:"Epochs"` + BatchSize int64 `table:"Batch Size"` + LearningRate string `table:"Learning Rate"` + Beta string `table:"Beta"` +} + +// ReinforcementConfigurationView has reinforcement-specific fields +type ReinforcementConfigurationView struct { + TrainingType string `table:"Training Type"` + Epochs int64 `table:"Epochs"` + BatchSize int64 `table:"Batch Size"` + LearningRate string `table:"Learning Rate"` + ComputeMultiplier string `table:"Compute Multiplier"` + EvalInterval string `table:"Eval Interval"` + EvalSamples string `table:"Eval Samples"` + ReasoningEffort string `table:"Reasoning Effort"` +} + +// DataView is the training/validation data section +type DataView struct { + TrainingFile string `table:"Training File"` + ValidationFile 
string             `table:"Validation File"`
+}
+
+// JobDetailViews contains all view sections for a job detail display
+type JobDetailViews struct {
+	Details       *JobDetailsView
+	Timestamps    *TimestampsView
+	Configuration interface{} // Can be Base, DPO, or Reinforcement view
+	Data          *DataView
+}
+
+// ToTableView converts a FineTuningJob to its table view (for list command)
+func (j *FineTuningJob) ToTableView() *FineTuningJobTableView {
+	return &FineTuningJobTableView{
+		ID:        j.ID,
+		Status:    j.Status,
+		BaseModel: j.BaseModel,
+		CreatedAt: j.CreatedAt,
+	}
+}
+
+// ToDetailViews converts a FineTuningJobDetail to its sectioned views (for show command)
+func (j *FineTuningJobDetail) ToDetailViews() *JobDetailViews {
+	fineTunedModel := j.FineTunedModel
+	if fineTunedModel == "" {
+		fineTunedModel = "-"
+	}
+
+	// Guard against a missing hyperparameters block so the view never panics
+	hp := j.Hyperparameters
+	if hp == nil {
+		hp = &Hyperparameters{}
+	}
+
+	// Build configuration view based on method type
+	var configView interface{}
+	switch j.Method {
+	case string(DPO):
+		configView = &DPOConfigurationView{
+			TrainingType: j.Method,
+			Epochs:       hp.NEpochs,
+			BatchSize:    hp.BatchSize,
+			LearningRate: formatFloatOrDash(hp.LearningRateMultiplier),
+			Beta:         formatFloatOrDash(hp.Beta),
+		}
+	case string(Reinforcement):
+		configView = &ReinforcementConfigurationView{
+			TrainingType:      j.Method,
+			Epochs:            hp.NEpochs,
+			BatchSize:         hp.BatchSize,
+			LearningRate:      formatFloatOrDash(hp.LearningRateMultiplier),
+			ComputeMultiplier: formatFloatOrDash(hp.ComputeMultiplier),
+			EvalInterval:      formatInt64OrDash(hp.EvalInterval),
+			EvalSamples:       formatInt64OrDash(hp.EvalSamples),
+			ReasoningEffort:   stringOrDash(hp.ReasoningEffort),
+		}
+	default: // supervised or unknown
+		configView = &BaseConfigurationView{
+			TrainingType: j.Method,
+			Epochs:       hp.NEpochs,
+			BatchSize:    hp.BatchSize,
+			LearningRate: formatFloatOrDash(hp.LearningRateMultiplier),
+		}
+	}
+
+	return &JobDetailViews{
+		Details: &JobDetailsView{
+			ID:             j.ID,
+			Status:         j.Status,
+			Model:          j.Model,
+			FineTunedModel: fineTunedModel,
+		},
+		Timestamps: &TimestampsView{
+			Created:      formatTimeOrDash(j.CreatedAt),
+			Finished:     formatTimePointerOrDash(j.FinishedAt),
+			EstimatedETA: formatTimePointerOrDash(j.EstimatedFinish),
+		},
+		Configuration: configView,
+		Data: &DataView{
+			TrainingFile:   j.TrainingFile,
+			ValidationFile: stringOrDash(j.ValidationFile),
+		},
+	}
+}
+
+// ToTableViews converts a single fine-tuning job to its table view
+func ToTableViews(job *FineTuningJob) *FineTuningJobTableView {
+	view := job.ToTableView()
+	return view
+}
+
+func formatFloat(f float64) string {
+	return fmt.Sprintf("%g", f)
+}
+
+func formatFloatOrDash(f float64) string {
+	if f == 0 {
+		return "-"
+	}
+	return fmt.Sprintf("%g", f)
+}
+
+func formatInt64OrDash(i int64) string {
+	if i == 0 {
+		return "-"
+	}
+	return fmt.Sprintf("%d", i)
+}
+
+func stringOrDash(s string) string {
+	if s == "" {
+		return "-"
+	}
+	return s
+}
+
+// formatTimeOrDash formats a time, or returns a dash if it is zero
+func formatTimeOrDash(t time.Time) string {
+	if t.IsZero() {
+		return "-"
+	}
+	return t.Format("2006-01-02 15:04")
+}
+
+func formatTimePointerOrDash(t *time.Time) string {
+	if t == nil || t.IsZero() {
+		return "-"
+	}
+	return t.Format("2006-01-02 15:04")
+}
diff --git a/cli/azd/extensions/azure.ai.finetune/version.txt b/cli/azd/extensions/azure.ai.finetune/version.txt
new file mode 100644
index 00000000000..58842435b07
--- /dev/null
+++ b/cli/azd/extensions/azure.ai.finetune/version.txt
@@
-0,0 +1 @@ +0.0.8-preview diff --git a/eng/pipelines/release-ext-azure-ai-finetune.yml b/eng/pipelines/release-ext-azure-ai-finetune.yml new file mode 100644 index 00000000000..fd04e3b4270 --- /dev/null +++ b/eng/pipelines/release-ext-azure-ai-finetune.yml @@ -0,0 +1,32 @@ +# Continuous deployment trigger +trigger: + branches: + include: + - main + paths: + include: + - go.mod + - cli/azd/extensions/azure.ai.finetune + - eng/pipelines/release-azd-extension.yml + - /eng/pipelines/templates/jobs/build-azd-extension.yml + - /eng/pipelines/templates/jobs/cross-build-azd-extension.yml + - /eng/pipelines/templates/variables/image.yml + +pr: + paths: + include: + - cli/azd/extensions/azure.ai.finetune + - eng/pipelines/release-azd-extension.yml + - eng/pipelines/templates/steps/publish-cli.yml + exclude: + - cli/azd/docs/** + +extends: + template: /eng/pipelines/templates/stages/1es-redirect.yml + parameters: + stages: + - template: /eng/pipelines/templates/stages/release-azd-extension.yml + parameters: + AzdExtensionId: azure.ai.finetune + SanitizedExtensionId: azure-ai-finetune + AzdExtensionDirectory: cli/azd/extensions/azure.ai.finetune \ No newline at end of file diff --git a/eng/pipelines/templates/steps/setup-go.yml b/eng/pipelines/templates/steps/setup-go.yml index cd4375a9b15..c464afea92f 100644 --- a/eng/pipelines/templates/steps/setup-go.yml +++ b/eng/pipelines/templates/steps/setup-go.yml @@ -40,6 +40,10 @@ steps: Write-Host "##vso[task.prependpath]$goBin" displayName: Add Go bin to PATH + - pwsh: | + Write-Host "GOPROXY: $($env:GOPROXY)" + displayName: GOPROXY + - pwsh: go install gotest.tools/gotestsum@latest condition: ${{ parameters.Condition }} displayName: Install gotest
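To round out the models above, a sketch of how the view types in pkg/models/views.go pair with the renderer in internal/utils: ToDetailViews splits a job detail into sections, and each section prints as indented key/value pairs. Every field value below is a placeholder.

package main

import (
	"time"

	"azure.ai.finetune/internal/utils"
	"azure.ai.finetune/pkg/models"
)

func main() {
	// Hypothetical job detail; IDs, model names, and hyperparameters are placeholders.
	detail := &models.FineTuningJobDetail{
		ID:           "ftjob-abc123",
		Status:       models.StatusRunning,
		Model:        "gpt-4o-mini",
		CreatedAt:    time.Now().UTC(),
		Method:       string(models.Supervised),
		TrainingFile: "file-abc123",
		Hyperparameters: &models.Hyperparameters{
			NEpochs:                3,
			BatchSize:              8,
			LearningRateMultiplier: 0.1,
		},
	}

	views := detail.ToDetailViews()

	// Each section is a small struct with `table` tags, so the generic
	// key/value renderer in internal/utils prints it with an indent.
	_ = utils.PrintObjectWithIndent(views.Details, utils.FormatTable, "  ")
	_ = utils.PrintObjectWithIndent(views.Timestamps, utils.FormatTable, "  ")
	_ = utils.PrintObjectWithIndent(views.Configuration, utils.FormatTable, "  ")
	_ = utils.PrintObjectWithIndent(views.Data, utils.FormatTable, "  ")
}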