[GRDM-57239] Update repo2docker to 2026.02.0 and rdmfs to 2026.02.1 #218
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# E2E test workflow: runs on pushes/PRs to mainline branches, and supports
# manual dispatch with overrides for the RDM branch and component images.
name: E2E Test Execution

on:
  push:
    branches: [ main, develop, feature/* ]
  pull_request:
    branches: [ main, develop ]
  workflow_dispatch:
    inputs:
      rdm_repository:
        description: 'RDM-osf.io repository (default: RCOSDP/RDM-osf.io)'
        required: false
        default: 'RCOSDP/RDM-osf.io'
      rdm_branch:
        description: 'RDM-osf.io branch to test against'
        required: false
        default: 'develop'
      merge_custom_rdm:
        description: 'Merge custom RDM branch into base branch before testing'
        required: false
        default: 'true'
        type: choice
        options:
          - 'true'
          - 'false'
      osf_image:
        description: 'Custom OSF image (default: niicloudoperation/rdm-osf.io:latest)'
        required: false
        default: ''
      ember_image:
        description: 'Custom Ember image (default: niicloudoperation/rdm-ember-osf-web:latest)'
        required: false
        default: ''
      cas_image:
        description: 'Custom CAS image (default: niicloudoperation/rdm-cas-overlay:latest)'
        required: false
        default: ''
      mfr_image:
        description: 'Custom MFR image (default: niicloudoperation/rdm-modular-file-renderer:latest)'
        required: false
        default: ''
      wb_image:
        description: 'Custom WaterButler image (default: niicloudoperation/rdm-waterbutler:latest)'
        required: false
        default: ''
      tljh_version:
        description: 'TLJH version (default: 1.0.0)'
        required: false
        default: ''
      tljh_plugin:
        description: 'tljh-repo2docker plugin (default: RCOSDP/CS-tljh-repo2docker@master)'
        required: false
        default: ''
      tljh_repo2docker_image:
        description: 'repo2docker Docker image (default: gcr.io/nii-ap-ops/repo2docker:2026.02.0)'
        required: false
        default: ''
      tljh_rdmfs_image:
        description: 'RDMFS Docker image (default: gcr.io/nii-ap-ops/rdmfs:2026.02.1)'
        required: false
        default: ''
      test_config:
        description: 'Test configuration file to use'
        required: false
        default: 'ci.config.yaml'
      skip_failed:
        description: 'Continue running tests even if some fail'
        required: false
        default: 'true'
        type: choice
        options:
          - 'true'
          - 'false'
jobs:
  e2e-test:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    strategy:
      # Run every test group to completion even if one group fails.
      fail-fast: false
      matrix:
        test-group:
          - name: user
            display_name: "User Tests"
            include_admin: false
            skip_admin: true
            skip_metadata: false
            minio_enabled: false
            weko_enabled: false
            jupyterhub_enabled: false
            flowable_enabled: false
          - name: admin
            display_name: "Admin Tests"
            include_admin: true
            skip_admin: false
            skip_metadata: true
            skip_default_storage: true
            skip_login: true
            minio_enabled: false
            weko_enabled: false
            jupyterhub_enabled: false
            flowable_enabled: false
            exclude_notebooks: |
              テスト手順-管理者機能-ログイン可否設定.ipynb
              テスト手順-管理者機能-ノード管理.ipynb
              テスト手順-管理者機能-ユーザ管理.ipynb
              テスト手順-管理者機能-機関設定.ipynb
              テスト手順-管理者機能-RDMユーザメール.ipynb
              テスト手順-管理者機能-RDM登録.ipynb
              テスト手順-管理者機能-アドオン利用制御.ipynb
              テスト手順-管理者機能-利用統計.ipynb
              テスト手順-管理者機能-アナウンス.ipynb
              テスト手順-管理者機能-証跡管理.ipynb
              テスト手順-管理者機能-NIIストレージのクォータ.ipynb
          - name: migration-from-20250906
            display_name: "Migration Test from 20250906"
            include_admin: true
            # Pin the migration source to a known release snapshot.
            migration_from:
              repository: "RCOSDP/RDM-osf.io"
              commit: "4c007f2"
              osf_image: "niicloudoperation/rdm-osf.io:20250906"
              ember_image: "niicloudoperation/rdm-ember-osf-web:20250906"
              cas_image: "niicloudoperation/rdm-cas-overlay:20250906"
              mfr_image: "niicloudoperation/rdm-modular-file-renderer:20250906"
              wb_image: "niicloudoperation/rdm-waterbutler:20250906"
              initial_data_notebook: "取りまとめ-Migration前-20250906.ipynb"
              post_migration_notebook: "取りまとめ-Migration後-20250906.ipynb"
            exclude_notebooks: |
              テスト手順-管理者機能-ログイン可否設定.ipynb
              テスト手順-管理者機能-ノード管理.ipynb
              テスト手順-管理者機能-ユーザ管理.ipynb
              テスト手順-管理者機能-機関設定.ipynb
              テスト手順-管理者機能-RDMユーザメール.ipynb
              テスト手順-管理者機能-RDM登録.ipynb
              テスト手順-管理者機能-アドオン利用制御.ipynb
              テスト手順-管理者機能-利用統計.ipynb
              テスト手順-管理者機能-アナウンス.ipynb
              テスト手順-管理者機能-証跡管理.ipynb
              テスト手順-管理者機能-NIIストレージのクォータ.ipynb
            minio_enabled: false
            weko_enabled: false
            jupyterhub_enabled: false
            flowable_enabled: false
          - name: user-minio
            display_name: "User Tests (MinIO)"
            include_admin: false
            skip_admin: true
            skip_metadata: false
            skip_default_storage: true
            skip_login: true
            skip_130mb_upload: true
            minio_enabled: true
            weko_enabled: false
            jupyterhub_enabled: false
            flowable_enabled: false
          - name: user-weko
            display_name: "User Tests (WEKO)"
            include_admin: true
            skip_admin: true
            skip_metadata: true
            skip_default_storage: true
            skip_login: true
            skip_130mb_upload: true
            minio_enabled: false
            weko_enabled: true
            jupyterhub_enabled: false
            flowable_enabled: false
          - name: user-jupyterhub
            display_name: "User Tests (JupyterHub)"
            include_admin: false
            skip_admin: true
            skip_metadata: true
            skip_default_storage: true
            skip_login: true
            skip_130mb_upload: true
            minio_enabled: false
            weko_enabled: false
            jupyterhub_enabled: true
            flowable_enabled: false
          - name: user-workflow
            display_name: "User Tests (Flowable)"
            include_admin: true
            skip_admin: true
            skip_metadata: true
            skip_default_storage: true
            skip_login: true
            skip_130mb_upload: true
            minio_enabled: false
            weko_enabled: false
            jupyterhub_enabled: false
            flowable_enabled: true
            workflow_batch_project_count: 50
    name: E2E ${{ matrix.test-group.display_name }}
    env:
      # Normalize matrix booleans to string env vars for use in shell steps
      # and step-level `if:` conditions.
      MINIO_ENABLED: ${{ matrix.test-group.minio_enabled == true && 'true' || 'false' }}
      WEKO_ENABLED: ${{ matrix.test-group.weko_enabled == true && 'true' || 'false' }}
      JUPYTERHUB_ENABLED: ${{ matrix.test-group.jupyterhub_enabled == true && 'true' || 'false' }}
      FLOWABLE_ENABLED: ${{ matrix.test-group.flowable_enabled == true && 'true' || 'false' }}
      # CI-only MinIO root credentials for the ephemeral test service.
      MINIO_ROOT_USER: e2eadmin
      MINIO_ROOT_PASSWORD: e2epassword123
      MINIO_ALIAS: ci-minio
      MINIO_ENDPOINT: http://minio:9000
      MINIO_REGION: us-east-1
      MINIO_SERVICE_NAME: MinIO (CI)
| steps: | |
| - name: Checkout test repository | |
| uses: actions/checkout@v4 | |
| with: | |
| path: e2e-tests | |
| - name: Determine RDM repository and branch | |
| id: rdm_config | |
| run: | | |
| BASE_REPO="RCOSDP/RDM-osf.io" | |
| BASE_BRANCH="develop" | |
| CUSTOM_REPO="" | |
| CUSTOM_BRANCH="" | |
| MERGE_REQUIRED="false" | |
| MERGE_FLAG="true" | |
| TARGET_REPO="$BASE_REPO" | |
| TARGET_BRANCH="$BASE_BRANCH" | |
| if [[ "${{ github.event_name }}" == "pull_request" ]]; then | |
| # Extract from PR body (using file to avoid shell injection) | |
| cat > /tmp/pr_body.txt << 'EOF' | |
| ${{ github.event.pull_request.body }} | |
| EOF | |
| RDM_REPO=$(grep -oP '^-\s*RDM_REPOSITORY:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' || true) | |
| RDM_BRANCH=$(grep -oP '^-\s*RDM_BRANCH:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' || true) | |
| RDM_MERGE=$(grep -oP '^-\s*RDM_MERGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' | tr '[:upper:]' '[:lower:]' || true) | |
| if [[ -n "$RDM_MERGE" ]]; then | |
| case "$RDM_MERGE" in | |
| false|no|off) | |
| MERGE_FLAG="false" | |
| ;; | |
| true|yes|on) | |
| MERGE_FLAG="true" | |
| ;; | |
| esac | |
| fi | |
| if [[ -n "$RDM_REPO" || -n "$RDM_BRANCH" ]]; then | |
| CUSTOM_REPO="${RDM_REPO:-$BASE_REPO}" | |
| CUSTOM_BRANCH="${RDM_BRANCH:-$BASE_BRANCH}" | |
| fi | |
| else | |
| # Use workflow inputs or defaults | |
| INPUT_REPO="${{ github.event.inputs.rdm_repository || '' }}" | |
| INPUT_BRANCH="${{ github.event.inputs.rdm_branch || '' }}" | |
| INPUT_MERGE="${{ github.event.inputs.merge_custom_rdm || '' }}" | |
| if [[ -n "$INPUT_MERGE" ]]; then | |
| INPUT_MERGE=$(echo "$INPUT_MERGE" | tr '[:upper:]' '[:lower:]') | |
| case "$INPUT_MERGE" in | |
| false|no|off) | |
| MERGE_FLAG="false" | |
| ;; | |
| true|yes|on) | |
| MERGE_FLAG="true" | |
| ;; | |
| esac | |
| fi | |
| if [[ -n "$INPUT_REPO" || -n "$INPUT_BRANCH" ]]; then | |
| CUSTOM_REPO="${INPUT_REPO:-$BASE_REPO}" | |
| CUSTOM_BRANCH="${INPUT_BRANCH:-$BASE_BRANCH}" | |
| fi | |
| fi | |
| if [[ -n "$CUSTOM_REPO" || -n "$CUSTOM_BRANCH" ]]; then | |
| CUSTOM_REPO="${CUSTOM_REPO:-$BASE_REPO}" | |
| CUSTOM_BRANCH="${CUSTOM_BRANCH:-$BASE_BRANCH}" | |
| if [[ "$MERGE_FLAG" == "true" ]]; then | |
| MERGE_REQUIRED="true" | |
| else | |
| TARGET_REPO="$CUSTOM_REPO" | |
| TARGET_BRANCH="$CUSTOM_BRANCH" | |
| fi | |
| fi | |
| echo "Base RDM repository: $BASE_REPO @ $BASE_BRANCH" | |
| if [[ "$MERGE_REQUIRED" == "true" ]]; then | |
| echo "Merging custom RDM source: $CUSTOM_REPO @ $CUSTOM_BRANCH" | |
| elif [[ -n "$CUSTOM_REPO" || -n "$CUSTOM_BRANCH" ]]; then | |
| echo "Using custom RDM source without merge: $CUSTOM_REPO @ $CUSTOM_BRANCH" | |
| fi | |
| echo "Checkout target: $TARGET_REPO @ $TARGET_BRANCH" | |
| echo "repository=$TARGET_REPO" >> $GITHUB_OUTPUT | |
| echo "branch=$TARGET_BRANCH" >> $GITHUB_OUTPUT | |
| echo "custom_repository=$CUSTOM_REPO" >> $GITHUB_OUTPUT | |
| echo "custom_branch=$CUSTOM_BRANCH" >> $GITHUB_OUTPUT | |
| echo "merge_required=$MERGE_REQUIRED" >> $GITHUB_OUTPUT | |
| echo "merge_flag=$MERGE_FLAG" >> $GITHUB_OUTPUT | |
| - name: Checkout RDM-osf.io | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: ${{ steps.rdm_config.outputs.repository }} | |
| ref: ${{ steps.rdm_config.outputs.branch }} | |
| path: RDM-osf.io | |
| fetch-depth: 0 | |
| - name: Checkout WEKO repository | |
| if: env.WEKO_ENABLED == 'true' | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: RCOSDP/weko | |
| ref: develop_v2.0.0 | |
| path: weko | |
| - name: Checkout Flowable gateway repository | |
| if: env.FLOWABLE_ENABLED == 'true' | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: RCOSDP/RDM-flowable-gateway | |
| path: RDM-flowable-gateway | |
| - name: Prepare WEKO compose | |
| if: env.WEKO_ENABLED == 'true' | |
| working-directory: e2e-tests | |
| run: | | |
| .github/scripts/setup_weko.sh prepare ../weko | |
| - name: Prepare Flowable gateway | |
| if: env.FLOWABLE_ENABLED == 'true' | |
| working-directory: e2e-tests | |
| run: | | |
| .github/scripts/setup_flowable.sh prepare ../RDM-flowable-gateway ../RDM-osf.io | |
| - name: Merge custom RDM changes into develop | |
| if: steps.rdm_config.outputs.merge_required == 'true' | |
| working-directory: RDM-osf.io | |
| run: | | |
| CUSTOM_REPO="${{ steps.rdm_config.outputs.custom_repository }}" | |
| CUSTOM_BRANCH="${{ steps.rdm_config.outputs.custom_branch }}" | |
| BASE_REPO="${{ steps.rdm_config.outputs.repository }}" | |
| # Ensure committer identity is available before running merge commands | |
| git config user.name "github-actions[bot]" | |
| git config user.email "RDM-e2e-test-nb@example.com" | |
| if [[ "$CUSTOM_REPO" == "$BASE_REPO" ]]; then | |
| echo "Fetching branch $CUSTOM_BRANCH from origin" | |
| git fetch origin "$CUSTOM_BRANCH" | |
| git merge --no-ff --no-commit "origin/$CUSTOM_BRANCH" | |
| else | |
| echo "Fetching branch $CUSTOM_BRANCH from $CUSTOM_REPO" | |
| if git remote get-url custom >/dev/null 2>&1; then | |
| git remote set-url custom "https://github.com/${CUSTOM_REPO}.git" | |
| else | |
| git remote add custom "https://github.com/${CUSTOM_REPO}.git" | |
| fi | |
| git fetch custom "$CUSTOM_BRANCH" | |
| git merge --no-ff --no-commit FETCH_HEAD | |
| fi | |
| if git status --short | grep '^UU' >/dev/null; then | |
| echo "Merge conflicts detected; aborting." | |
| exit 1 | |
| fi | |
| if git diff --quiet --cached; then | |
| echo "No custom changes to merge; repository already up to date." | |
| else | |
| git commit --no-verify -m "Temporary merge of ${CUSTOM_REPO}@${CUSTOM_BRANCH} for E2E tests" | |
| fi | |
| - name: Create local reference for merged target | |
| working-directory: RDM-osf.io | |
| run: | | |
| git branch --force e2e-target HEAD | |
| - name: Setup migration source (migration mode) | |
| if: matrix.test-group.migration_from != null | |
| working-directory: RDM-osf.io | |
| run: | | |
| # Fetch origin to ensure we have the target version | |
| git fetch origin | |
| # Add migration source repository as remote if different | |
| if [ "${{ matrix.test-group.migration_from.repository }}" != "${{ steps.rdm_config.outputs.repository }}" ]; then | |
| git remote add migration-source https://github.com/${{ matrix.test-group.migration_from.repository }}.git | |
| git fetch migration-source | |
| fi | |
| # Checkout the migration source commit | |
| git checkout ${{ matrix.test-group.migration_from.commit }} | |
| - name: Configure Docker to use /mnt | |
| run: | | |
| # Stop Docker service | |
| sudo systemctl stop docker | |
| # Create Docker data directory on /mnt | |
| sudo mkdir -p /mnt/docker | |
| # Configure Docker to use /mnt | |
| sudo tee /etc/docker/daemon.json <<EOF | |
| { | |
| "data-root": "/mnt/docker" | |
| } | |
| EOF | |
| # Start Docker service | |
| sudo systemctl start docker | |
| # Verify Docker is using /mnt | |
| docker info | grep "Docker Root Dir" | |
| df -h | |
| - name: Set up Docker Buildx | |
| uses: docker/setup-buildx-action@v3 | |
| - name: Build Elasticsearch image with Kuromoji (amd64) | |
| working-directory: e2e-tests | |
| run: | | |
| docker build -t elasticsearch-kuromoji:amd64 \ | |
| -f docker/elasticsearch/Dockerfile.amd64 docker/elasticsearch | |
| echo "ELASTICSEARCH_IMAGE=elasticsearch-kuromoji:amd64" >> "$GITHUB_ENV" | |
| - name: Install Docker Compose | |
| run: | | |
| sudo curl -L "https://github.com/docker/compose/releases/download/v2.40.3/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose | |
| sudo chmod +x /usr/local/bin/docker-compose | |
| docker-compose --version | |
| - name: Setup host networking alias | |
| run: | | |
| # Add loopback alias for Ubuntu as per README-docker-compose.md | |
| sudo ifconfig lo:0 192.168.168.167 netmask 255.255.255.255 up | |
| - name: Setup initial environment for migration | |
| if: matrix.test-group.migration_from != null | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| # Setup configuration files | |
| export INCLUDE_ADMIN=${{ matrix.test-group.include_admin }} | |
| setup_config_files | |
| # Create docker-compose override with migration source images | |
| export OSF_IMAGE="${{ matrix.test-group.migration_from.osf_image }}" | |
| export EMBER_IMAGE="${{ matrix.test-group.migration_from.ember_image }}" | |
| export CAS_IMAGE="${{ matrix.test-group.migration_from.cas_image }}" | |
| export MFR_IMAGE="${{ matrix.test-group.migration_from.mfr_image }}" | |
| export WB_IMAGE="${{ matrix.test-group.migration_from.wb_image }}" | |
| create_docker_override | |
| # Install requirements | |
| install_requirements | |
| # Run migrations | |
| run_migrations | |
| # Enable feature flags | |
| export FEATURE_FLAGS="ember_edit_draft_registration_page" | |
| enable_feature_flags | |
| # Compile translations | |
| compile_translations | |
| # Start asset services after translations | |
| start_asset_services | |
| # Start services for migration source environment | |
| start_rdm_services | |
| # Test endpoints | |
| test_rdm_endpoints | |
| - name: Set up Python for E2E tests | |
| uses: actions/setup-python@v4 | |
| with: | |
| python-version: '3.12' | |
| - name: Install E2E test dependencies | |
| working-directory: e2e-tests | |
| run: | | |
| # Install without cache to save disk space | |
| python -m pip install --upgrade pip | |
| pip install --no-cache-dir -r requirements.txt | |
| pip install --no-cache-dir papermill | |
| # Clear pip cache if any remains | |
| pip cache purge || true | |
| - name: Setup Node.js for Playwright | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '18' | |
| - name: Install Playwright | |
| run: | | |
| # Install Playwright browsers with OS dependencies | |
| playwright install --with-deps chromium firefox | |
| - name: Create test data for migration source | |
| if: matrix.test-group.migration_from != null | |
| working-directory: RDM-osf.io | |
| run: | | |
| echo "Creating test data in migration source environment..." | |
| # Setup institutions first (required for setup_test_data.py) | |
| docker-compose run --rm web python3 -m scripts.populate_institutions -e test -a | |
| # Copy the setup script to the container | |
| docker cp ${{ github.workspace }}/e2e-tests/.github/scripts/setup_test_data.py "$(docker-compose ps -q web)":/tmp/setup_test_data.py | |
| # Create test data that will be migrated | |
| docker-compose exec -T web bash -c "python3 manage.py shell < /tmp/setup_test_data.py" | |
| echo "Migration source test data created" | |
| - name: Run migration-specific initial data script | |
| if: matrix.test-group.migration_from.initial_data_script != null | |
| working-directory: RDM-osf.io | |
| run: | | |
| echo "Running migration-specific initial data script..." | |
| # Copy the migration-specific script to the container | |
| docker cp ${{ github.workspace }}/e2e-tests/migrations/${{ matrix.test-group.migration_from.initial_data_script }} "$(docker-compose ps -q web)":/tmp/migration_init.py | |
| # Execute the migration-specific initial data script | |
| docker-compose exec -T web bash -c "python3 manage.py shell < /tmp/migration_init.py" | |
| echo "Migration-specific initial data created" | |
| - name: Run migration-specific initial data notebook | |
| if: matrix.test-group.migration_from.initial_data_notebook != null | |
| working-directory: e2e-tests/migrations | |
| run: | | |
| echo "Running migration-specific initial data notebook..." | |
| # Create result directory for migration initial data | |
| mkdir -p ../result | |
| # Execute the notebook against the migration source environment | |
| papermill ${{ matrix.test-group.migration_from.initial_data_notebook }} \ | |
| ../result/migration_initial_${{ matrix.test-group.migration_from.initial_data_notebook }} \ | |
| -p rdm_url "http://192.168.168.167:5000/" \ | |
| -p admin_rdm_url "http://192.168.168.167:8001/" \ | |
| -p idp_name_1 "FakeCAS" \ | |
| -p idp_username_1 "testuser1@example.com" \ | |
| -p idp_password_1 "testpass123" \ | |
| -p idp_name_2 "FakeCAS" \ | |
| -p idp_username_2 "testuser2@example.com" \ | |
| -p idp_password_2 "testpass456" \ | |
| -p default_result_path "../result" \ | |
| -p transition_timeout 60000 | |
| echo "Migration-specific initial data notebook executed" | |
| - name: Stop migration source environment | |
| if: matrix.test-group.migration_from != null | |
| working-directory: RDM-osf.io | |
| run: | | |
| echo "Stopping migration source environment..." | |
| docker-compose down | |
| echo "Migration source environment stopped" | |
| - name: Switch to migration target version | |
| if: matrix.test-group.migration_from != null | |
| working-directory: RDM-osf.io | |
| run: | | |
| echo "Switching to migration target version..." | |
| # Reset to the merged target state captured earlier | |
| git checkout -f e2e-target | |
| echo "Switched to target version: e2e-target" | |
| - name: Create required configuration files for RDM | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| # Setup configuration files | |
| export INCLUDE_ADMIN=${{ matrix.test-group.include_admin }} | |
| setup_config_files | |
| - name: Create docker-compose override for NII Cloud Operation images | |
| working-directory: RDM-osf.io | |
| run: | | |
| # Check for image override in PR body or use workflow inputs | |
| if [[ "${{ github.event_name }}" == "pull_request" ]]; then | |
| # Extract image overrides from PR description if present | |
| # Format: - OSF_IMAGE: yacchin1205/rdm2-osf.io:feature_ror-v2 | |
| cat > /tmp/pr_body.txt << 'EOF' | |
| ${{ github.event.pull_request.body }} | |
| EOF | |
| OSF_OVERRIDE=$(grep -oP '^-\s*OSF_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' ' || true) | |
| EMBER_OVERRIDE=$(grep -oP '^-\s*EMBER_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' ' || true) | |
| CAS_OVERRIDE=$(grep -oP '^-\s*CAS_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' ' || true) | |
| MFR_OVERRIDE=$(grep -oP '^-\s*MFR_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' ' || true) | |
| WB_OVERRIDE=$(grep -oP '^-\s*WB_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' ' || true) | |
| EXCLUDE_NOTEBOOKS_OVERRIDE=$(grep -oP '^-\s*EXCLUDE_NOTEBOOKS:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d '\r' || true) | |
| # Extract TLJH parameters from PR body | |
| TLJH_VERSION_OVERRIDE=$(grep -oP '^-\s*TLJH_VERSION:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' || true) | |
| TLJH_PLUGIN_OVERRIDE=$(grep -oP '^-\s*TLJH_PLUGIN:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' || true) | |
| TLJH_REPO2DOCKER_IMAGE_OVERRIDE=$(grep -oP '^-\s*TLJH_REPO2DOCKER_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' || true) | |
| TLJH_RDMFS_IMAGE_OVERRIDE=$(grep -oP '^-\s*TLJH_RDMFS_IMAGE:\s*\K.*$' /tmp/pr_body.txt | head -1 | tr -d ' \r' || true) | |
| # Export for use in later steps | |
| echo "EXCLUDE_NOTEBOOKS=$EXCLUDE_NOTEBOOKS_OVERRIDE" >> $GITHUB_ENV | |
| echo "TLJH_VERSION_OVERRIDE=$TLJH_VERSION_OVERRIDE" >> $GITHUB_ENV | |
| echo "TLJH_PLUGIN_OVERRIDE=$TLJH_PLUGIN_OVERRIDE" >> $GITHUB_ENV | |
| echo "TLJH_REPO2DOCKER_IMAGE_OVERRIDE=$TLJH_REPO2DOCKER_IMAGE_OVERRIDE" >> $GITHUB_ENV | |
| echo "TLJH_RDMFS_IMAGE_OVERRIDE=$TLJH_RDMFS_IMAGE_OVERRIDE" >> $GITHUB_ENV | |
| # Use overrides if found, otherwise defaults | |
| export OSF_IMAGE="${OSF_OVERRIDE:-niicloudoperation/rdm-osf.io:latest}" | |
| export EMBER_IMAGE="${EMBER_OVERRIDE:-niicloudoperation/rdm-ember-osf-web:latest}" | |
| export CAS_IMAGE="${CAS_OVERRIDE:-niicloudoperation/rdm-cas-overlay:latest}" | |
| export MFR_IMAGE="${MFR_OVERRIDE:-niicloudoperation/rdm-modular-file-renderer:latest}" | |
| export WB_IMAGE="${WB_OVERRIDE:-niicloudoperation/rdm-waterbutler:latest}" | |
| else | |
| # Use workflow dispatch inputs or defaults | |
| export OSF_IMAGE="${{ github.event.inputs.osf_image || 'niicloudoperation/rdm-osf.io:latest' }}" | |
| export EMBER_IMAGE="${{ github.event.inputs.ember_image || 'niicloudoperation/rdm-ember-osf-web:latest' }}" | |
| export CAS_IMAGE="${{ github.event.inputs.cas_image || 'niicloudoperation/rdm-cas-overlay:latest' }}" | |
| export MFR_IMAGE="${{ github.event.inputs.mfr_image || 'niicloudoperation/rdm-modular-file-renderer:latest' }}" | |
| export WB_IMAGE="${{ github.event.inputs.wb_image || 'niicloudoperation/rdm-waterbutler:latest' }}" | |
| fi | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| create_docker_override | |
| - name: Install RDM requirements | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| install_requirements | |
| timeout-minutes: 15 | |
| - name: Run Django migrations | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| run_migrations | |
| - name: Enable feature flags | |
| if: matrix.test-group.migration_from == null | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| export FEATURE_FLAGS="ember_edit_draft_registration_page" | |
| enable_feature_flags | |
| - name: Compile translations | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| export INCLUDE_ADMIN=${{ matrix.test-group.include_admin }} | |
| compile_translations | |
| # Start asset services immediately after translations | |
| start_asset_services | |
| - name: Start all RDM services | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| export INCLUDE_ADMIN=${{ matrix.test-group.include_admin }} | |
| start_rdm_services | |
| - name: Register OAuth scopes | |
| working-directory: RDM-osf.io | |
| run: | | |
| docker-compose run --rm web python3 -m scripts.register_oauth_scopes | |
| - name: Seed KAKEN Elasticsearch data | |
| working-directory: RDM-osf.io | |
| run: | | |
| set -xeu | |
| NRID_URL="https://nrid.nii.ac.jp/nrid/1000050735975.json" | |
| JSON_FILE=$(mktemp) | |
| curl -fL "$NRID_URL" -o "$JSON_FILE" | |
| docker cp "$JSON_FILE" "$(docker-compose ps -q web)":/tmp/kaken_seed.json | |
| docker cp ${{ github.workspace }}/e2e-tests/.github/scripts/seed_kaken_es.py "$(docker-compose ps -q web)":/tmp/seed_kaken_es.py | |
| docker-compose exec -T web python3 /tmp/seed_kaken_es.py /tmp/kaken_seed.json "$NRID_URL" | |
| rm "$JSON_FILE" | |
| - name: Initialize MinIO for S3 compatibility tests | |
| if: env.MINIO_ENABLED == 'true' | |
| working-directory: e2e-tests | |
| run: | | |
| set -eu | |
| key_part() { | |
| LC_ALL=C tr -dc 'a-z0-9' < /dev/urandom | head -c 18 | |
| } | |
| secret_part() { | |
| LC_ALL=C tr -dc 'A-Za-z0-9' < /dev/urandom | head -c 40 | |
| } | |
| ACCESS_KEY_1="ci$(key_part)" | |
| SECRET_KEY_1="$(secret_part)" | |
| ACCESS_KEY_2="ci$(key_part)" | |
| SECRET_KEY_2="$(secret_part)" | |
| BUCKET_PREFIX="ci-${GITHUB_RUN_ID:-0}-${GITHUB_RUN_ATTEMPT:-0}" | |
| BUCKET_NAME_1="${BUCKET_PREFIX}-a" | |
| BUCKET_NAME_2="${BUCKET_PREFIX}-b" | |
| export MINIO_VERIFY_LARGE_UPLOAD=true | |
| .github/scripts/setup_minio_buckets.sh \ | |
| ../RDM-osf.io \ | |
| "${MINIO_ALIAS}" \ | |
| "${MINIO_ENDPOINT}" \ | |
| "${MINIO_ROOT_USER}" \ | |
| "${MINIO_ROOT_PASSWORD}" \ | |
| "${ACCESS_KEY_1}" "${SECRET_KEY_1}" "${BUCKET_NAME_1}" \ | |
| "${ACCESS_KEY_2}" "${SECRET_KEY_2}" "${BUCKET_NAME_2}" | |
| echo "S3COMPAT_ACCESS_KEY_1=${ACCESS_KEY_1}" >> $GITHUB_ENV | |
| echo "S3COMPAT_SECRET_KEY_1=${SECRET_KEY_1}" >> $GITHUB_ENV | |
| echo "S3COMPAT_ACCESS_KEY_2=${ACCESS_KEY_2}" >> $GITHUB_ENV | |
| echo "S3COMPAT_SECRET_KEY_2=${SECRET_KEY_2}" >> $GITHUB_ENV | |
| echo "S3COMPAT_BUCKET_NAME_1=${BUCKET_NAME_1}" >> $GITHUB_ENV | |
| echo "S3COMPAT_BUCKET_NAME_2=${BUCKET_NAME_2}" >> $GITHUB_ENV | |
| echo "S3COMPAT_REGION=${MINIO_REGION}" >> $GITHUB_ENV | |
| echo "S3COMPAT_ENDPOINT=${MINIO_ENDPOINT}" >> $GITHUB_ENV | |
| echo "S3COMPAT_SERVICE_NAME=${MINIO_SERVICE_NAME}" >> $GITHUB_ENV | |
| - name: Start MinIO trace | |
| if: env.MINIO_ENABLED == 'true' | |
| working-directory: RDM-osf.io | |
| run: | | |
| set -eu | |
| docker-compose run -d --name minio-trace --entrypoint /bin/sh minio-mc -c " | |
| set -euo pipefail | |
| mc alias set ${MINIO_ALIAS} ${MINIO_ENDPOINT} ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD} | |
| mc admin trace --json --verbose --errors --path '${S3COMPAT_BUCKET_NAME_1}/*' --path '${S3COMPAT_BUCKET_NAME_2}/*' ${MINIO_ALIAS} | |
| " | |
| - name: Test endpoint accessibility | |
| working-directory: RDM-osf.io | |
| run: | | |
| source ${{ github.workspace }}/e2e-tests/.github/scripts/setup_rdm.sh | |
| export INCLUDE_ADMIN=${{ matrix.test-group.include_admin }} | |
| test_rdm_endpoints | |
| - name: Install WEKO stack | |
| if: env.WEKO_ENABLED == 'true' | |
| working-directory: e2e-tests | |
| run: | | |
| .github/scripts/setup_weko.sh install ../weko | |
| - name: Configure WEKO connection info | |
| if: env.WEKO_ENABLED == 'true' | |
| run: | | |
| echo "WEKO_URL=https://192.168.168.167" >> $GITHUB_ENV | |
| echo "WEKO_ADMIN_EMAIL=wekosoftware@nii.ac.jp" >> $GITHUB_ENV # gitleaks:allow | |
| echo "WEKO_ADMIN_PASSWORD=uspass123" >> $GITHUB_ENV | |
| echo "WEKO_USER_EMAIL=contributor@example.org" >> $GITHUB_ENV # gitleaks:allow | |
| echo "WEKO_USER_PASSWORD=uspass123" >> $GITHUB_ENV | |
| echo "WEKO_INSTITUTION_NAME=Virginia Tech [Test]" >> $GITHUB_ENV | |
| echo "WEKO_INDEX_NAME=Sample Index" >> $GITHUB_ENV | |
| echo "WEKO_DOCKER_COMPOSE_PATH=${{ github.workspace }}/weko/docker-compose2.yml" >> $GITHUB_ENV | |
| echo "IGNORE_HTTPS_ERRORS=true" >> $GITHUB_ENV | |
| - name: Verify RDM containers can connect to WEKO via HTTPS | |
| if: env.WEKO_ENABLED == 'true' | |
| working-directory: RDM-osf.io | |
| run: | | |
| set -xeu | |
| echo "=== RDM to WEKO HTTPS Connection Verification ===" | |
| # web container uses requests (certifi) | |
| echo "Testing web (requests)..." | |
| docker-compose exec -T web python3 -c "import requests; print('CA bundle:', requests.certs.where()); r = requests.get('https://192.168.168.167', timeout=30); print('Status:', r.status_code)" | |
| # wb container uses aiohttp (OpenSSL system certs) | |
| echo "Testing wb (aiohttp)..." | |
| cat "${{ github.workspace }}/e2e-tests/.github/scripts/verify_weko_https.py" | docker-compose exec -T wb python3 | |
| - name: Install Flowable stack | |
| if: env.FLOWABLE_ENABLED == 'true' | |
| working-directory: e2e-tests | |
| run: | | |
| .github/scripts/setup_flowable.sh install ../RDM-flowable-gateway | |
| - name: Configure Flowable connection info | |
| if: env.FLOWABLE_ENABLED == 'true' | |
| run: | | |
| echo "GATEWAY_BASE_URL=http://192.168.168.167:8088/" >> $GITHUB_ENV | |
| echo "WORKFLOW_BATCH_PROJECT_COUNT=${{ matrix.test-group.workflow_batch_project_count || 50 }}" >> $GITHUB_ENV | |
| - name: Install TLJH stack | |
| if: env.JUPYTERHUB_ENABLED == 'true' | |
| working-directory: e2e-tests | |
| env: | |
| TLJH_VERSION: ${{ env.TLJH_VERSION_OVERRIDE || github.event.inputs.tljh_version || '1.0.0' }} | |
| TLJH_PLUGIN: ${{ env.TLJH_PLUGIN_OVERRIDE || github.event.inputs.tljh_plugin || 'RCOSDP/CS-tljh-repo2docker@master' }} | |
| REPO2DOCKER_IMAGE: ${{ env.TLJH_REPO2DOCKER_IMAGE_OVERRIDE || github.event.inputs.tljh_repo2docker_image || 'gcr.io/nii-ap-ops/repo2docker:2026.02.0' }} | |
| RDMFS_IMAGE: ${{ env.TLJH_RDMFS_IMAGE_OVERRIDE || github.event.inputs.tljh_rdmfs_image || 'gcr.io/nii-ap-ops/rdmfs:2026.02.1' }} | |
| run: | | |
| .github/scripts/setup_tljh.sh install | |
| - name: Configure TLJH connection info | |
| if: env.JUPYTERHUB_ENABLED == 'true' | |
| run: | | |
| echo "TLJH_URL=http://localhost" >> $GITHUB_ENV | |
| echo "TLJH_USERNAME=admin" >> $GITHUB_ENV | |
| echo "TLJH_PASSWORD=change-your-password" >> $GITHUB_ENV | |
| - name: Run post-migration tests | |
| if: matrix.test-group.migration_from.post_migration_notebook != null | |
| working-directory: e2e-tests/migrations | |
| run: | | |
| echo "Running post-migration tests..." | |
| # Create result directory (parent already exists from checkout) | |
| mkdir -p ../result | |
| # Execute the post-migration test notebook | |
| papermill ${{ matrix.test-group.migration_from.post_migration_notebook }} \ | |
| ../result/${{ matrix.test-group.migration_from.post_migration_notebook }} \ | |
| -p rdm_url "http://192.168.168.167:5000/" \ | |
| -p admin_rdm_url "http://192.168.168.167:8001/" \ | |
| -p idp_name_1 "FakeCAS" \ | |
| -p idp_username_1 "testuser1@example.com" \ | |
| -p idp_password_1 "testpass123" \ | |
| -p idp_name_2 "FakeCAS" \ | |
| -p idp_username_2 "testuser2@example.com" \ | |
| -p idp_password_2 "testpass456" \ | |
| -p default_result_path "../result" \ | |
| -p transition_timeout 60000 | |
| echo "Post-migration tests completed" | |
# Seed the freshly started OSF stack: populate institutions, create the test
# users and one project per user inside the web container, then export the
# created project IDs/names to $GITHUB_ENV for the config-generation step.
- name: Create test users and projects
  working-directory: RDM-osf.io
  run: |
    # The default Actions shell is `bash -e` WITHOUT pipefail, so the
    # `| tee` pipeline below would otherwise mask a non-zero exit from
    # `docker-compose exec` — fail fast with the real error instead of
    # only tripping the empty-ID check later.
    set -o pipefail
    # Setup institutions first
    docker-compose run --rm web python3 -m scripts.populate_institutions -e test -a
    # Copy the setup script to the container
    docker cp ${{ github.workspace }}/e2e-tests/.github/scripts/setup_test_data.py "$(docker-compose ps -q web)":/tmp/setup_test_data.py
    # Execute the script and capture project IDs
    docker-compose exec -T web bash -c "python3 manage.py shell < /tmp/setup_test_data.py" | tee /tmp/setup_output.txt
    # Extract project IDs and names from output.
    # IDs take field 2 only; names use `-f2-` because they may contain spaces.
    PROJECT_ID_1=$(grep "PROJECT_ID_testuser1@example.com:" /tmp/setup_output.txt | cut -d' ' -f2)
    PROJECT_ID_2=$(grep "PROJECT_ID_testuser2@example.com:" /tmp/setup_output.txt | cut -d' ' -f2)
    PROJECT_NAME_1=$(grep "PROJECT_NAME_testuser1@example.com:" /tmp/setup_output.txt | cut -d' ' -f2-)
    PROJECT_NAME_2=$(grep "PROJECT_NAME_testuser2@example.com:" /tmp/setup_output.txt | cut -d' ' -f2-)
    # Verify project IDs were created
    if [ -z "${PROJECT_ID_1}" ] || [ -z "${PROJECT_ID_2}" ]; then
      echo "ERROR: Failed to create projects for test users"
      echo "PROJECT_ID_1: ${PROJECT_ID_1}"
      echo "PROJECT_ID_2: ${PROJECT_ID_2}"
      exit 1
    fi
    # Export for later steps
    echo "PROJECT_ID_1=${PROJECT_ID_1}" >> $GITHUB_ENV
    echo "PROJECT_ID_2=${PROJECT_ID_2}" >> $GITHUB_ENV
    echo "PROJECT_NAME_1=${PROJECT_NAME_1}" >> $GITHUB_ENV
    echo "PROJECT_NAME_2=${PROJECT_NAME_2}" >> $GITHUB_ENV
    echo "Projects created successfully:"
    echo " testuser1: ${PROJECT_ID_1} - ${PROJECT_NAME_1}"
    echo " testuser2: ${PROJECT_ID_2} - ${PROJECT_NAME_2}"
# Load the e-Rad sample metadata into the running OSF web container.
- name: Register e-Rad data
  working-directory: RDM-osf.io
  run: |
    # Resolve the web container once, then push the sample CSV into it.
    web_cid="$(docker-compose ps -q web)"
    docker cp ${{ github.workspace }}/e2e-tests/erad_sample.csv "${web_cid}":/tmp/erad_sample.csv
    # Register e-Rad metadata
    echo "Registering e-Rad test data..."
    docker-compose exec -T web python3 -m scripts.register_erad_metadata /tmp/erad_sample.csv
    echo "e-Rad data registered successfully"
# Generate ci.config.yaml for this matrix group: write the common settings to
# a base file, let generate_ci_config.sh merge in feature-specific sections
# (minio/jupyterhub/weko/flowable), then append notebook exclusions from the
# matrix entry and/or the PR body.
- name: Prepare test configuration
  working-directory: e2e-tests
  run: |
    # Quoted 'EOF' suppresses *shell* expansion only; the ${{ }} expressions
    # below are expanded by the Actions runner before the script executes.
    # NOTE(review): PROJECT_NAME_* values are interpolated into this YAML as
    # raw text — a project name containing a quote would corrupt the config;
    # presumably setup_test_data.py emits safe names, confirm.
    cat > base_ci.config.yaml <<'EOF'
    # CI Test Configuration - ${{ matrix.test-group.display_name }}
    rdm_url: 'http://192.168.168.167:5000/'
    admin_rdm_url: 'http://192.168.168.167:8001/'
    # Test users (created above)
    idp_name_1: 'FakeCAS'
    idp_username_1: 'testuser1@example.com'
    idp_password_1: 'testpass123'
    idp_name_2: 'FakeCAS'
    idp_username_2: 'testuser2@example.com'
    idp_password_2: 'testpass456'
    # Admin users for admin page tests
    idp_name_integrated_admin: 'FakeCAS'
    idp_username_integrated_admin: 'testuser1@example.com'
    idp_password_integrated_admin: 'testpass123'
    idp_name_institutional_admin: 'FakeCAS'
    idp_username_institutional_admin: 'teststaff@example.com'
    idp_password_institutional_admin: 'testpass789'
    idp_name_non_admin: 'FakeCAS'
    idp_username_non_admin: 'testuser2@example.com'
    idp_password_non_admin: 'testpass456'
    idp_name_other_institution: 'FakeCAS'
    idp_username_other_institution: 'testuser3@example.com'
    idp_password_other_institution: 'testpass321'
    institution_name: 'Virginia Tech [Test]'
    # Test project URLs (using created project IDs and names)
    rdm_project_url_1: 'http://192.168.168.167:5000/${{ env.PROJECT_ID_1 }}/'
    rdm_project_name_1: '${{ env.PROJECT_NAME_1 }}'
    rdm_project_url_2: 'http://192.168.168.167:5000/${{ env.PROJECT_ID_2 }}/'
    # Test settings for ${{ matrix.test-group.name }} group
    skip_failed_test: true # Continue on failure
    transition_timeout: 60000
    skip_preview_check: true
    skip_130mb_upload: ${{ matrix.test-group.skip_130mb_upload || false }}
    skip_default_storage: ${{ matrix.test-group.skip_default_storage || false }}
    skip_metadata: ${{ matrix.test-group.skip_metadata }}
    skip_admin: ${{ matrix.test-group.skip_admin }}
    skip_login: ${{ matrix.test-group.skip_login || false }}
    weko_enabled: ${{ matrix.test-group.weko_enabled == true }}
    enable_1gb_file_upload: false
    skip_erad_completion_test: false
    storages_oauth: []
    EOF
    # Feature flags (set earlier in the job) select which sections the
    # merge script adds on top of the base config.
    args=(ci.config.yaml base_ci.config.yaml)
    if [ "${MINIO_ENABLED}" = "true" ]; then
      args+=(--minio)
    fi
    if [ "${JUPYTERHUB_ENABLED}" = "true" ]; then
      args+=(--jupyterhub)
    fi
    if [ "${WEKO_ENABLED}" = "true" ]; then
      args+=(--weko)
    fi
    if [ "${FLOWABLE_ENABLED}" = "true" ]; then
      args+=(--flowable)
    fi
    .github/scripts/generate_ci_config.sh "${args[@]}"
    rm base_ci.config.yaml
    # Add exclude_notebooks - merge both PR body and matrix configuration
    # NOTE(review): matrix.test-group.exclude_notebooks is interpolated
    # directly into shell text below; repo-controlled, but quoting
    # characters in it would break the script — consider passing via env.
    if [ -n "$EXCLUDE_NOTEBOOKS" ] || [ -n "${{ matrix.test-group.exclude_notebooks }}" ]; then
      echo "" >> ci.config.yaml
      echo "# Exclude specific notebooks (merged from PR and matrix)" >> ci.config.yaml
      echo "exclude_notebooks:" >> ci.config.yaml
      # Add matrix excludes first (newline-separated list)
      if [ -n "${{ matrix.test-group.exclude_notebooks }}" ]; then
        echo "${{ matrix.test-group.exclude_notebooks }}" | while read notebook; do
          if [ -n "$notebook" ]; then
            echo "  - '$notebook'" >> ci.config.yaml
          fi
        done
      fi
      # Add PR excludes (comma-separated)
      if [ -n "$EXCLUDE_NOTEBOOKS" ]; then
        echo "$EXCLUDE_NOTEBOOKS" | tr ',' '\n' | while read notebook; do
          if [ -n "$notebook" ]; then
            # Trim whitespace
            notebook=$(echo "$notebook" | xargs)
            echo "  - '$notebook'" >> ci.config.yaml
          fi
        done
      fi
    fi
    echo "Generated CI configuration for ${{ matrix.test-group.display_name }}:"
    cat ci.config.yaml
# Execute the full E2E notebook suite for this matrix group.
- name: Run E2E tests
  working-directory: e2e-tests
  run: |
    # Output directories: all executed notebooks land in result/, failing
    # ones are additionally copied to result-failed/ for triage.
    mkdir -p result result-failed
    # Driver reads the ci.config.yaml generated by the previous step.
    python run_tests.py ci.config.yaml --failed-result-path result-failed
# Derive a ticket id for the report: GRDM-XXXXX from the PR body's
# "## Ticket" section, falling back to PR-<number>.
- name: Extract GRDM ticket from PR
  if: always() && github.event_name == 'pull_request'
  id: extract-ticket
  env:
    # Pass untrusted PR content through env vars instead of interpolating
    # ${{ }} into the script: template expansion happens BEFORE the shell
    # runs, so a PR body containing a literal `EOF` line (or shell code)
    # would previously break out of the heredoc and execute.
    PR_BODY: ${{ github.event.pull_request.body }}
    PR_NUMBER: ${{ github.event.pull_request.number }}
  run: |
    # Materialize the PR body safely (using file to avoid shell injection)
    printf '%s\n' "$PR_BODY" > /tmp/pr_body.txt
    # Extract GRDM-XXXXX from Ticket section (between ## Ticket and next ## or end)
    TICKET=$(sed -n '/## Ticket/,/^##/p' /tmp/pr_body.txt | grep -oE "GRDM-[0-9]+" | head -1 || echo "")
    if [ -z "$TICKET" ]; then
      TICKET="PR-${PR_NUMBER}"
    fi
    echo "ticket=$TICKET" >> "$GITHUB_OUTPUT"
    echo "Extracted ticket: $TICKET"
# Build an .xlsx summary of all notebook results; runs even after failures
# so the report is always available for the upload step below.
- name: Generate Excel summary
  if: always()
  working-directory: e2e-tests
  run: |
    # Install required dependencies for Excel generation
    pip install openpyxl
    # Determine ticket number: the GRDM ticket extracted from the PR body,
    # otherwise a synthetic id based on this run's number
    if [ "${{ github.event_name }}" == "pull_request" ]; then
      TICKET="${{ steps.extract-ticket.outputs.ticket }}"
    else
      TICKET="ACTIONS-${{ github.run_number }}"
    fi
    # Generate Excel summary from test results
    python .github/scripts/generate_excel_summary.py result/ "GitHub Actions" "$TICKET"
    # List generated Excel files and screenshots; `|| echo` keeps the step
    # green when nothing was produced (default shell runs with -e)
    ls -la result/test-summary-*.xlsx || echo "No Excel summary generated"
    ls -la result/screenshots/ || echo "No screenshots generated"
# Publish the Excel report together with the captured screenshots.
- name: Upload Excel summary with screenshots
  id: excel-upload
  if: always()
  uses: actions/upload-artifact@v4
  with:
    name: test-summary-excel-${{ matrix.test-group.name }}
    path: |
      e2e-tests/result/test-summary-*.xlsx
      e2e-tests/result/screenshots/
    retention-days: 30
# Failed-notebook subset, kept 30 days for triage.
- name: Upload failed test results
  if: always()
  uses: actions/upload-artifact@v4
  with:
    name: test-results-failed-${{ matrix.test-group.name }}
    path: e2e-tests/result-failed/
    retention-days: 30
# Complete result tree, kept only 7 days.
- name: Upload full test results
  if: always()
  uses: actions/upload-artifact@v4
  with:
    name: test-results-full-${{ matrix.test-group.name }}
    path: e2e-tests/result/
    retention-days: 7
# Grab the request trace from the minio-trace side-car container
# (NOTE(review): presumably started by an earlier setup step — not visible
# in this chunk) and remove the container afterwards.
- name: Collect MinIO trace
  if: always() && env.MINIO_ENABLED == 'true'
  working-directory: RDM-osf.io
  run: |
    set -eu
    # Only act when the side-car actually exists; both commands are
    # best-effort so trace collection can never fail the job.
    if docker ps -a --format '{{.Names}}' | grep -q '^minio-trace$'; then
      docker logs minio-trace > ../e2e-tests/minio-trace.log || true
      docker rm -f minio-trace || true
    fi
- name: Upload MinIO trace
  if: always() && env.MINIO_ENABLED == 'true'
  uses: actions/upload-artifact@v4
  with:
    name: minio-trace-${{ matrix.test-group.name }}
    path: e2e-tests/minio-trace.log
    # The log may legitimately be absent when the side-car never ran.
    if-no-files-found: ignore
# Dump the WEKO stack's container logs (best-effort) and publish them.
- name: Collect WEKO logs
  if: always() && env.WEKO_ENABLED == 'true'
  working-directory: weko
  run: |
    docker compose -f docker-compose2.yml logs > ../e2e-tests/weko-docker-logs.txt 2>&1 || true
- name: Upload WEKO logs
  if: always() && env.WEKO_ENABLED == 'true'
  uses: actions/upload-artifact@v4
  with:
    name: weko-logs-${{ matrix.test-group.name }}
    path: e2e-tests/weko-docker-logs.txt
    retention-days: 7
# TLJH runs as a systemd service on the runner host, so its logs come from
# journalctl rather than Docker (best-effort).
- name: Collect TLJH logs
  if: always() && env.JUPYTERHUB_ENABLED == 'true'
  run: |
    sudo journalctl -u jupyterhub > e2e-tests/jupyterhub-logs.txt 2>&1 || true
- name: Upload TLJH logs
  if: always() && env.JUPYTERHUB_ENABLED == 'true'
  uses: actions/upload-artifact@v4
  with:
    name: tljh-logs-${{ matrix.test-group.name }}
    path: e2e-tests/jupyterhub-logs.txt
    retention-days: 7
# Dump the Flowable gateway's container logs (best-effort) and publish them.
- name: Collect Flowable logs
  if: always() && env.FLOWABLE_ENABLED == 'true'
  working-directory: RDM-flowable-gateway
  run: |
    docker compose logs > ../e2e-tests/flowable-docker-logs.txt 2>&1 || true
- name: Upload Flowable logs
  if: always() && env.FLOWABLE_ENABLED == 'true'
  uses: actions/upload-artifact@v4
  with:
    name: flowable-logs-${{ matrix.test-group.name }}
    path: e2e-tests/flowable-docker-logs.txt
    retention-days: 7
# Tear down the optional service stacks via the same helper scripts that
# started them; each runs under always() so runners are left clean even
# when tests failed.
- name: Stop WEKO stack
  if: always() && env.WEKO_ENABLED == 'true'
  working-directory: e2e-tests
  run: |
    .github/scripts/setup_weko.sh down ../weko
- name: Stop TLJH stack
  if: always() && env.JUPYTERHUB_ENABLED == 'true'
  working-directory: e2e-tests
  run: |
    .github/scripts/setup_tljh.sh down
- name: Stop Flowable stack
  if: always() && env.FLOWABLE_ENABLED == 'true'
  working-directory: e2e-tests
  run: |
    .github/scripts/setup_flowable.sh down ../RDM-flowable-gateway
# Final teardown of the main OSF stack: print diagnostics, save logs for the
# failure-only upload below, then stop everything and reclaim disk.
- name: Cleanup and save logs
  if: always()
  run: |
    cd RDM-osf.io
    # Diagnostics and teardown are best-effort: the default Actions shell
    # runs with -e, so without `|| true` a flaky Docker daemon here would
    # abort the step BEFORE `docker-compose down -v` runs, leaking the stack.
    echo "=== Final service status ==="
    docker-compose ps || true
    echo "=== Service resource usage ==="
    docker stats --no-stream || true
    # Save logs for debugging (uploaded by the next step on failure)
    docker-compose logs > ../e2e-tests/docker-compose-logs.txt 2>&1 || true
    # Stop all services and drop volumes
    docker-compose down -v || true
    # Clean up Docker resources
    docker system prune -f
# docker-compose-logs.txt is written by the cleanup step above; only upload
# it when the job failed, where it is useful for debugging.
- name: Upload Docker logs
  if: failure()
  uses: actions/upload-artifact@v4
  with:
    name: docker-compose-logs-${{ matrix.test-group.name }}
    path: e2e-tests/docker-compose-logs.txt
    retention-days: 7