Schedule tests to run weekly #378
Workflow file for this run

name: tests

on:
  push:
    branches:
      - "main"
    tags:
      - "*"
  pull_request:
  schedule:
    # Runs at 6:10am UTC on Monday
    - cron: '10 6 * * 1'
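    # (cron fields: minute, hour, day-of-month, month, day-of-week)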
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
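  # container registry coordinates; not referenced by any job in this workflow file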
jobs:
  linting:
    name: Linting
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/lint@v2

  manifest:
    name: Check manifest
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/check_manifest@v2

  test:
    needs: [linting, manifest]
    name: ${{ matrix.os }} py${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    env:
      KERAS_BACKEND: torch
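      # KERAS_BACKEND selects the Keras 3 backend; torch is presumably used so the
      # tests do not require TensorFlow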
    strategy:
      fail-fast: false
      matrix:
        # Run tests on Ubuntu across all supported Python versions
        python-version: ["3.10", "3.11", "3.12"]
        os: [ubuntu-latest]
        # Include a Windows run and both Intel (macos-13) and Apple silicon (macos-latest) Mac runs
        include:
          - os: macos-13
            python-version: "3.12"
          - os: macos-latest
            python-version: "3.12"
          - os: windows-latest
            python-version: "3.12"
    steps:
      # Cache atlases
      - name: Cache brainglobe directory
        uses: actions/cache@v3
        with:
          path: | # ensure we don't cache any interrupted atlas download and extraction, if e.g. we cancel the workflow manually
            ~/.brainglobe
            !~/.brainglobe/atlas.tar.gz
          key: atlases-models
          fail-on-cache-miss: true
          enableCrossOsArchive: true
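          # fail-on-cache-miss aborts the job if the 'atlases-models' cache entry is not found;
          # enableCrossOsArchive lets a cache saved on Linux be restored on the Windows/macOS runners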
      # Install additional macOS dependencies
      - name: Install HDF5 libraries (needed on M1 Macs only)
        if: matrix.os == 'macos-latest'
        run: |
          brew install hdf5
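          # presumably needed so HDF5-dependent Python packages can build against a
          # system libhdf5 on the arm64 runner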
      # Cache cellfinder workflow data
      - name: Cache data for cellfinder workflow tests
        uses: actions/cache@v3
        with:
          path: "~/.brainglobe-tests"
          key: cellfinder-test-data
          fail-on-cache-miss: true
          enableCrossOsArchive: true

      # Run tests
      - uses: neuroinformatics-unit/actions/test@v2
        with:
          python-version: ${{ matrix.python-version }}
          secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
  benchmarks:
    name: Check benchmarks
    runs-on: ubuntu-latest
    # Set shell in login mode as global setting for the job
    defaults:
      run:
        shell: bash -l {0}
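    # (a login shell is needed so the conda environment created by setup-miniconda
    # below is activated in every 'run' step)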
    strategy:
      matrix:
        python-version: ["3.12"]
    steps:
      - name: Checkout brainglobe-workflows repository
        uses: actions/checkout@v4

      - name: Create and activate conda environment # we need conda for asv management of environments
        uses: conda-incubator/setup-miniconda@v3 # see https://github.com/conda-incubator/setup-miniconda/issues/261
        with:
          miniconda-version: py310_24.1.2-0 # we need conda<24.3, see https://github.com/airspeed-velocity/asv/pull/1397
          python-version: ${{ matrix.python-version }}
          activate-environment: asv-only
      - name: Install asv
        run: |
          pip install --upgrade pip
          pip install asv
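      # 'asv check' in the steps below imports the benchmark suite and checks it for
      # errors without actually running the benchmarks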
      - name: Run asv check with pip dependencies
        working-directory: ${{ github.workspace }}/benchmarks
        run: |
          # check benchmarks with pip dependencies
          asv check -v --config $GITHUB_WORKSPACE/benchmarks/asv.pip.conf.json

      - name: Run asv check with latest-github dependencies
        working-directory: ${{ github.workspace }}/benchmarks
        run: |
          # check benchmarks with latest-github dependencies
          asv check -v --config $GITHUB_WORKSPACE/benchmarks/asv.latest-github.conf.json
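  # the two jobs below build and publish distributions only when a tag is pushed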
  build_sdist_wheels:
    name: Build source distribution
    needs: [test]
    if: github.event_name == 'push' && github.ref_type == 'tag'
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/build_sdist_wheels@v2
  upload_all:
    name: Publish build distributions
    needs: [build_sdist_wheels]
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && github.ref_type == 'tag'
    steps:
      - uses: neuroinformatics-unit/actions/upload_pypi@v2
        with:
          secret-pypi-key: ${{ secrets.TWINE_API_KEY }}
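          # TWINE_API_KEY is assumed to be a repository secret holding the PyPI API
          # token used by the upload action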