# Docs workflow — Release 1.5.0 (#3586)
name: Docs

# Cancel any in-flight run for the same workflow and PR/ref so only the
# most recent docs build consumes runner time.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
  cancel-in-progress: true

on:
  push:
    branches:
      - master
      - develop
  pull_request:
    branches:
      - master
      - develop

# Permissions required to publish the built documentation.
permissions:
  contents: write
  pages: write
  id-token: write
jobs:
  build_docs:
    runs-on: ${{ matrix.os }}
    env:
      # All MOABB/MNE datasets are downloaded under the workspace so they
      # can be cached between runs (see the cache steps below).
      MNE_DATA: ${{ github.workspace }}/mne_data
    strategy:
      fail-fast: true
      matrix:
        os: [ ubuntu-latest ]
        python-version: [ "3.10" ]
    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python-version }}
          enable-cache: true

      - name: Restore MNE Data Cache
        id: cache-mne_data
        uses: actions/cache/restore@v4
        with:
          path: |
            mne_data
            ~/.mne
          key: doc-${{ runner.os }}-mne-data-v3
          restore-keys: |
            doc-${{ runner.os }}-mne-data-v3-

      - name: Clean up corrupted cache
        run: |
          echo "Cache hit: ${{ steps.cache-mne_data.outputs.cache-hit }}"
          echo "Cache matched key: ${{ steps.cache-mne_data.outputs.cache-matched-key }}"
          mkdir -p mne_data
          # Remove any incomplete extractions (e.g., BIDS.zip.unzip alongside final folder)
          if [ -d mne_data/BIDS.zip.unzip ]; then
            echo "Removing incomplete extraction: BIDS.zip.unzip"
            rm -rf mne_data/BIDS.zip.unzip
          fi
          if [ -d mne_data/MNE-RomaniBF2025ERP-data ] && [ -f mne_data/MNE-RomaniBF2025ERP-data/.cache-incomplete ]; then
            echo "Removing incomplete BrainForm dataset"
            rm -rf mne_data/MNE-RomaniBF2025ERP-data
          fi
          # Remove stale evaluation results to avoid race conditions
          # between parallel sphinx-gallery examples sharing HDF5 files.
          # FIX: results live under the workspace-relative mne_data directory
          # (MNE_DATA above), not ~/mne_data, which this job never populates.
          if [ -d mne_data/results ]; then
            echo "Removing stale evaluation results"
            rm -rf mne_data/results
          fi

      # Per-run cache handing the built HTML to the deploy jobs below.
      # Upgraded v3 -> v4 for consistency with the other cache actions in
      # this workflow and because the v3 cache-service backend is deprecated.
      - name: Cache docs build
        id: cache-docs
        uses: actions/cache@v4
        with:
          key: docs-build-${{ github.run_id }}-${{ github.run_attempt }}
          path: docs/build

      - name: Install moabb
        run: |
          uv pip install -e .[docs,deeplearning,optuna,external,carbonemission]
          uv pip install google-auth requests

      - name: Export GA4 page views snapshot
        env:
          GA4_PROPERTY_ID: ${{ secrets.GA4_PROPERTY_ID }}
          GA4_SERVICE_ACCOUNT_JSON: ${{ secrets.GA4_SERVICE_ACCOUNT_JSON }}
        run: |
          python .github/scripts/export_ga4_pageviews.py \
            --output docs/source/_static/analytics/pageviews.json

      - name: Configure MOABB download directory
        run: |
          python -c "
          from moabb.utils import set_download_dir
          from mne import set_config
          import os
          mne_data = os.environ['MNE_DATA']
          set_download_dir(mne_data)
          set_config('MOABB_RESULTS', mne_data)
          print(f'MNE_DATA set to: {mne_data}')
          "

      # Only runs when the restore step found no cache at all: downloads are
      # serialized with long delays to stay under Zenodo's rate limits.
      - name: Pre-download datasets (cold cache only)
        if: steps.cache-mne_data.outputs.cache-matched-key == ''
        run: |
          echo "Cache is cold, pre-downloading datasets with delays to avoid rate limiting..."
          python << 'EOF'
          import os
          import sys
          import time
          from moabb.utils import set_download_dir
          set_download_dir(os.environ["MNE_DATA"])
          # All datasets used in examples that download from Zenodo
          # Each entry: (module_path, class_name, subjects)
          # subjects can be an int (first N) or a list of specific subject IDs
          datasets_to_download = [
              ('moabb.datasets', 'BNCI2014_001', 3),
              ('moabb.datasets', 'BNCI2014_009', 2),
              ('moabb.datasets', 'Zhou2016', 4),  # All 4 subjects
              ('moabb.datasets', 'AlexMI', 1),
              ('moabb.datasets', 'Kalunga2016', 2),
              ('moabb.datasets', 'Cattan2019_VR', 1),
              ('moabb.datasets', 'Hinss2021', 1),
              ('moabb.datasets', 'Dreyer2023A', [1, 5, 7, 35]),  # plot_dreyer example
          ]
          failed = []
          for module_path, class_name, subj_spec in datasets_to_download:
              if isinstance(subj_spec, list):
                  subjects_label = f"subjects {subj_spec}"
              else:
                  subjects_label = f"{subj_spec} subjects"
              print(f'\n{"="*60}')
              print(f'Pre-downloading {class_name} ({subjects_label})...')
              print(f'{"="*60}')
              success = False
              for attempt in range(5):  # More retries
                  try:
                      import importlib
                      module = importlib.import_module(module_path)
                      cls = getattr(module, class_name)
                      ds = cls()
                      if isinstance(subj_spec, list):
                          subjects = subj_spec
                      else:
                          subjects = ds.subject_list[:subj_spec]
                      ds.download(subject_list=subjects)
                      print(f'SUCCESS: {class_name} downloaded ({subjects_label})')
                      success = True
                      break
                  except Exception as e:
                      print(f'Attempt {attempt + 1}/5 failed: {e}')
                      if attempt < 4:
                          wait = 90 * (attempt + 1)  # 90s, 180s, 270s, 360s
                          print(f'Waiting {wait}s before retry...')
                          time.sleep(wait)
              if not success:
                  failed.append(class_name)
                  print(f'FAILED: {class_name} after 5 attempts')
              # Wait between datasets to avoid rate limiting
              print('Waiting 60s before next dataset...')
              time.sleep(60)
          if failed:
              print(f'\n{"="*60}')
              print(f'ERROR: Failed to download: {", ".join(failed)}')
              print(f'{"="*60}')
              sys.exit(1)
          else:
              print(f'\n{"="*60}')
              print('All datasets downloaded successfully!')
              print(f'{"="*60}')
          EOF

      - name: Build docs
        env:
          # Disable parallel builds when cache is cold to avoid Zenodo rate limiting
          SPHINX_GALLERY_PARALLEL: ${{ steps.cache-mne_data.outputs.cache-matched-key != '' }}
          SPHINX_JOBS: ${{ steps.cache-mne_data.outputs.cache-matched-key != '' && 'auto' || '1' }}
        run: |
          echo "Cache hit: ${{ steps.cache-mne_data.outputs.cache-hit }}"
          echo "Cache matched key: ${{ steps.cache-mne_data.outputs.cache-matched-key }}"
          echo "Parallel gallery builds: $SPHINX_GALLERY_PARALLEL"
          echo "Sphinx jobs: $SPHINX_JOBS"
          # Pre-create .mne directory to avoid race condition when parallel
          # gallery examples call mne.set_config() simultaneously
          mkdir -p ~/.mne
          cd docs && make html

      - name: Generate notebooks from examples (Colab)
        run: |
          set -euo pipefail
          echo "Converting Python examples to notebooks in docs/build/html/auto_examples/_notebooks..."
          mkdir -p docs/build/html/auto_examples/_notebooks
          find examples -type f -name '*.py' | while read -r f; do
            rel="${f#examples/}"
            out_dir="docs/build/html/auto_examples/_notebooks/$(dirname "$rel")"
            mkdir -p "$out_dir"
            base="$(basename "$rel" .py)"
            out_path="$out_dir/$base.ipynb"
            python .github/scripts/convert_to_notebook.py --input "$f" --output "$out_path"
          done

      # Only persist the dataset cache from pushes to the long-lived branches,
      # and only when this run did not get an exact cache hit.
      - name: Save MNE Data Cache
        if: success() && steps.cache-mne_data.outputs.cache-hit != 'true' && github.event_name == 'push' && (github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master')
        uses: actions/cache/save@v4
        with:
          path: |
            mne_data
            ~/.mne
          key: doc-${{ runner.os }}-mne-data-v3

      # Create an artifact of the html output.
      - uses: actions/upload-artifact@v4
        with:
          name: DocumentationHTML
          path: docs/build/html/
| deploy_neurotechx: | |
| if: ${{ github.ref == 'refs/heads/develop' }} | |
| needs: build_docs | |
| runs-on: ${{ matrix.os }} | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| os: [ ubuntu-latest ] | |
| steps: | |
| - uses: actions/checkout@v4 | |
| - name: Restore cached docs build | |
| id: cache-docs | |
| uses: actions/cache/restore@v3 | |
| with: | |
| key: docs-build-${{ github.run_id }}-${{ github.run_attempt }} | |
| path: docs/build | |
| - name: Check cache hit | |
| if: steps.cache-docs.outputs.cache-hit != 'true' | |
| run: exit 1 | |
| - name: Deploy Neurotechx Subpage | |
| uses: peaceiris/actions-gh-pages@v4 | |
| with: | |
| github_token: ${{ secrets.GITHUB_TOKEN }} | |
| deploy_key: ${{ secrets.ACTIONS_DEPLOY_KEY }} | |
| external_repository: NeuroTechX/moabb.github.io | |
| destination_dir: docs/ | |
| publish_branch: master | |
| publish_dir: ./docs/build/html | |
| cname: moabb.neurotechx.com/ | |
| deploy_gh_pages: | |
| if: ${{ github.ref == 'refs/heads/develop' }} | |
| needs: build_docs | |
| runs-on: ${{ matrix.os }} | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| os: [ ubuntu-latest ] | |
| steps: | |
| - uses: actions/checkout@v4 | |
| - name: Restore cached docs build | |
| id: cache-docs | |
| uses: actions/cache/restore@v3 | |
| with: | |
| key: docs-build-${{ github.run_id }}-${{ github.run_attempt }} | |
| path: docs/build | |
| - name: Check cache hit | |
| if: steps.cache-docs.outputs.cache-hit != 'true' | |
| run: exit 1 | |
| - name: Deploy gh-pages | |
| uses: peaceiris/actions-gh-pages@v4 | |
| with: | |
| github_token: ${{ secrets.GITHUB_TOKEN }} | |
| deploy_key: ${{ secrets.MOABB_DEPLOY_KEY_NEW }} | |
| destination_dir: docs/ | |
| publish_branch: gh-pages | |
| publish_dir: ./docs/build/html | |
| cname: neurotechx.github.io/moabb/ |