performance.yml
---
name: Performance Testing

"on":
  pull_request:
    branches: [master]
  push:
    branches: [master]
  schedule:
    # Run performance tests weekly on Sunday at 23:00 UTC
    - cron: "0 23 * * 0"
  workflow_dispatch:
    inputs:
      benchmark_target:
        description: "Target to benchmark (apps, brews, recommend, outdated, all)"
        required: false
        default: "all"
        type: choice
        options:
          - apps
          - brews
          - recommend
          - outdated
          - all

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  PYTHONUNBUFFERED: 1
  FORCE_COLOR: 1

jobs:
  # Lightweight smoke check so the required "Performance Testing" status is present on PRs
  smoke:
    name: Performance Smoke Check
    runs-on: ubuntu-latest
    permissions: {}
    if: github.event_name == 'pull_request' || github.event_name == 'push'
    steps:
      - name: No-op performance status
        run: |
          echo "Performance tests are scheduled; smoke check passing for PR/push."

  performance-test:
    name: Performance Testing on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    permissions:
      contents: read
    # Only run full performance suite on schedule or manual dispatch
    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
    strategy:
      fail-fast: false
      matrix:
        os: [macos-latest]
        python-version: ["3.13"] # Use stable version for consistent results
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        with:
          fetch-depth: 0 # Need full history for performance comparison
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - name: Install Homebrew (if not present)
        if: runner.os == 'macOS'
        run: |
          if ! command -v brew &> /dev/null; then
            /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
          fi
      - name: Install dependencies
        uses: ./.github/actions/setup-python-deps
      - name: Make performance scripts executable
        run: |
          chmod +x scripts/performance_test.py scripts/compare_performance.py
      - name: Run performance benchmarks
        env:
          BENCHMARK_TARGET: ${{ github.event.inputs.benchmark_target || 'all' }}
        run: |
          python scripts/performance_test.py
      - name: Upload performance results
        uses: actions/upload-artifact@v7
        with:
          name: performance-results-${{ matrix.os }}
          path: performance_results.json
          retention-days: 30
      - name: Compare with previous results (if available)
        run: |
          python scripts/compare_performance.py
      - name: Create performance summary
        run: |
          echo "## Performance Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Command | Avg Time (s) | Memory (MB) |" >> $GITHUB_STEP_SUMMARY
          echo "|---------|--------------|-------------|" >> $GITHUB_STEP_SUMMARY
          if [ -f performance_results.json ]; then
            python -c "
          import json
          with open('performance_results.json') as f:
              data = json.load(f)
          for name, result in data['results'].items():
              if 'error' not in result:
                  print(f'| {name} | {result[\"avg_time\"]:.2f} | {result[\"avg_memory_mb\"]:.2f} |')
          " >> $GITHUB_STEP_SUMMARY
          else
            echo "| N/A | N/A | N/A |" >> $GITHUB_STEP_SUMMARY
          fi

  performance-analysis:
    name: Performance Analysis
    runs-on: ubuntu-latest
    needs: performance-test
    permissions: {}
    # Only analyze when full performance-test runs
    if: (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && always()
    steps:
      - name: Download performance results
        uses: actions/download-artifact@v8
        with:
          pattern: performance-results-*
          merge-multiple: true
      - name: Analyze performance trends
        run: |
          echo "=== Performance Analysis ==="
          # List all result files
          ls -la *.json || echo "No performance result files found"
          # Create trend analysis (placeholder for future enhancement)
          echo "Future enhancement: Trend analysis across multiple runs"
          echo "This job will track performance over time and detect regressions"

  performance-testing:
    name: Performance Testing
    runs-on: ubuntu-latest
    needs: [performance-test, performance-analysis]
    permissions: {}
    # Only report final status when full suite runs
    if: (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && always()
    steps:
      - name: Check performance test status
        run: |
          echo "Performance test status: ${{ needs.performance-test.result }}"
          echo "Performance analysis status: ${{ needs.performance-analysis.result }}"
          if [ "${{ needs.performance-test.result }}" != "success" ]; then
            echo "Performance test failed"
            exit 1
          fi
          echo "All performance tests completed successfully"