From 80703acf6a709dde1158553f98a214dce77d6785 Mon Sep 17 00:00:00 2001
From: David Kaplan
Date: Wed, 4 Sep 2024 15:12:01 -0500
Subject: [PATCH] Revert "Run tests in parallel"

---
 CHANGELOG-unreleased.md    |  1 -
 requirements_dev.txt       |  2 --
 tests/test_noisefit.py     | 55 +++++++++++++-------------------
 tests/test_publish.py      | 39 +++++++++++----------------
 tests/test_timing_model.py |  3 +++
 tests/test_toa_shuffle.py  | 30 ++++++++++++---------
 tox.ini                    | 16 +++--------
 7 files changed, 60 insertions(+), 86 deletions(-)

diff --git a/CHANGELOG-unreleased.md b/CHANGELOG-unreleased.md
index 92ef91fb9..25f3d6283 100644
--- a/CHANGELOG-unreleased.md
+++ b/CHANGELOG-unreleased.md
@@ -11,7 +11,6 @@ the released changes.
 ### Changed
 - Moved the events -> TOAs and photon weights code into the function `load_events_weights` within `event_optimize`.
 - Updated the `maxMJD` argument in `event_optimize` to default to the current mjd
-- Run CI tests in parallel, re-run failed tests (for intermittent failures due to random chance)
 - `maskParameter.__repr__()` output now includes the frozen attribute.
 - Changed default value of `FDJUMPLOG` to `Y`
 - Bumped `black` version to 24.x
diff --git a/requirements_dev.txt b/requirements_dev.txt
index d210e4854..a00511840 100644
--- a/requirements_dev.txt
+++ b/requirements_dev.txt
@@ -12,8 +12,6 @@ wheel>=0.29.0
 pytest>=4.3
 pytest-cov>=2.7.1
 pytest-runner>=5.1
-pytest-xdist
-pytest-rerunfailures
 flake8>=3.7
 pep8-naming>=0.8.2
 flake8-docstrings>=1.4
diff --git a/tests/test_noisefit.py b/tests/test_noisefit.py
index 69f652cec..16e7cdc31 100644
--- a/tests/test_noisefit.py
+++ b/tests/test_noisefit.py
@@ -6,38 +6,27 @@ from io import StringIO
 
 import numpy as np
-import pytest
+par = """
+    ELAT 1.3 1
+    ELONG 2.5 1
+    F0 100 1
+    F1 1e-13 1
+    PEPOCH 55000
+    EPHEM DE440
+    EFAC mjd 50000 53000 2 1
+    EQUAD mjd 53000 55000 0.8 1
+"""
 
 
-@pytest.fixture
-def model_and_toas_1():
-    par = """
-        ELAT 1.3 1
-        ELONG 2.5 1
-        F0 100 1
-        F1 1e-13 1
-        PEPOCH 55000
-        EPHEM DE440
-        EFAC mjd 50000 53000 2 1
-        EQUAD mjd 53000 55000 0.8 1
-    """
+m = get_model(StringIO(par))
+t = make_fake_toas_uniform(50000, 55000, 200, m, add_noise=True)
 
 
-    m = get_model(StringIO(par))
-    t = make_fake_toas_uniform(50000, 55000, 200, m, add_noise=True)
+m2, t2 = get_model_and_toas(
+    datadir / "ecorr_fit_test.par", datadir / "ecorr_fit_test.tim"
+)
 
 
-    return m, t
-
-
-@pytest.fixture
-def model_and_toas_2():
-    return get_model_and_toas(
-        datadir / "ecorr_fit_test.par", datadir / "ecorr_fit_test.tim"
-    )
-
-
-def test_white_noise_fit(model_and_toas_1):
-    m, t = model_and_toas_1
+def test_white_noise_fit():
     assert m.EFAC1.uncertainty_value == 0 and m.EQUAD1.uncertainty_value == 0
 
     ftr = DownhillWLSFitter(t, m)
@@ -58,9 +47,7 @@ def test_white_noise_fit(model_and_toas_1):
     )
 
 
-def test_white_noise_refit(model_and_toas_1):
-    m, t = model_and_toas_1
-
+def test_white_noise_refit():
     ftr = DownhillWLSFitter(t, m)
 
     ftr.model.EFAC1.value = 1.5
@@ -80,9 +67,7 @@ def test_white_noise_refit(model_and_toas_1):
     )
 
 
-def test_ecorr_fit(model_and_toas_2):
-    m2, t2 = model_and_toas_2
-
+def test_ecorr_fit():
     ftr = DownhillGLSFitter(t2, m2)
     ftr.fit_toas()
 
@@ -94,9 +79,7 @@ def test_ecorr_fit(model_and_toas_2):
     )
 
 
-def test_ecorr_refit(model_and_toas_2):
-    m2, t2 = model_and_toas_2
-
+def test_ecorr_refit():
     ftr = DownhillGLSFitter(t2, m2)
 
     ftr.model.ECORR1.value = 0.75
diff --git a/tests/test_publish.py b/tests/test_publish.py
index cab74adb1..f8663ff3d 100644
--- a/tests/test_publish.py
+++ b/tests/test_publish.py
@@ -5,37 +5,24 @@ from pint.scripts import pintpublish
 import os
 
 
+data_NGC6440E = get_model_and_toas(datadir / "NGC6440E.par", datadir / "NGC6440E.tim")
-
-@pytest.fixture
-def data_NGC6440E():
-    return get_model_and_toas(datadir / "NGC6440E.par", datadir / "NGC6440E.tim")
-
-@pytest.fixture
-def data_J0613m0200_NANOGrav_9yv1():
-    return get_model_and_toas(
-        datadir / "J0613-0200_NANOGrav_9yv1.gls.par",
-        datadir / "J0613-0200_NANOGrav_9yv1.tim",
-    )
-
-
-@pytest.fixture
-def data_J1614m2230_NANOGrav_12yv3_wb():
-    return get_model_and_toas(
-        datadir / "J1614-2230_NANOGrav_12yv3.wb.gls.par",
-        datadir / "J1614-2230_NANOGrav_12yv3.wb.tim",
-    )
-
-
-def test_NGC6440E(data_NGC6440E):
+def test_NGC6440E():
     m, t = data_NGC6440E
     output = publish(m, t)
     assert "1748-2021E" in output
     assert "DE421" in output
 
 
+data_J0613m0200_NANOGrav_9yv1 = get_model_and_toas(
+    datadir / "J0613-0200_NANOGrav_9yv1.gls.par",
+    datadir / "J0613-0200_NANOGrav_9yv1.tim",
+)
+
+
 @pytest.mark.parametrize("full", [True, False])
-def test_J0613m0200_NANOGrav_9yv1(data_J0613m0200_NANOGrav_9yv1, full):
+def test_J0613m0200_NANOGrav_9yv1(full):
     m, t = data_J0613m0200_NANOGrav_9yv1
     output = publish(
         m, t, include_dmx=full, include_fd=full, include_noise=full, include_jumps=full
@@ -52,8 +39,14 @@ def test_J0613m0200_NANOGrav_9yv1(data_J0613m0200_NANOGrav_9yv1, full):
     assert not full or "RNAMP" in output
 
 
+data_J1614m2230_NANOGrav_12yv3_wb = get_model_and_toas(
+    datadir / "J1614-2230_NANOGrav_12yv3.wb.gls.par",
+    datadir / "J1614-2230_NANOGrav_12yv3.wb.tim",
+)
+
+
 @pytest.mark.parametrize("full", [True, False])
-def test_J1614m2230_NANOGrav_12yv3_wb(data_J1614m2230_NANOGrav_12yv3_wb, full):
+def test_J1614m2230_NANOGrav_12yv3_wb(full):
     m, t = data_J1614m2230_NANOGrav_12yv3_wb
     output = publish(
         m, t, include_dmx=full, include_fd=full, include_noise=full, include_jumps=full
diff --git a/tests/test_timing_model.py b/tests/test_timing_model.py
index 5c9456757..2cd37645c 100644
--- a/tests/test_timing_model.py
+++ b/tests/test_timing_model.py
@@ -42,6 +42,9 @@ def timfile_nojumps():
     return get_TOAs(os.path.join(datadir, "NGC6440E.tim"))
 
 
+len_timfile_nojumps = len(get_TOAs(os.path.join(datadir, "NGC6440E.tim")))
+
+
 class TestModelBuilding:
     def setup_method(self):
         self.parfile = os.path.join(datadir, "J0437-4715.par")
diff --git a/tests/test_toa_shuffle.py b/tests/test_toa_shuffle.py
index cd7bd1b3c..2d757c09b 100644
--- a/tests/test_toa_shuffle.py
+++ b/tests/test_toa_shuffle.py
@@ -18,22 +18,33 @@ import pint.residuals
 from pint.models import get_model
 
+shuffletoas = """FORMAT 1
+test 1234.0 54321 0 pks
+test2 888 59055 0 meerkat
+test3 350 59000 0 gbt
+"""
+
 
 class TOAOrderSetup:
     parfile = os.path.join(datadir, "NGC6440E.par")
     model = get_model(parfile)
     # fake a multi-telescope, multi-frequency data-set and make sure the results don't depend on TOA order
-    t = (
+    fakes = [
         simulation.make_fake_toas_uniform(
             55000, 55500, 30, model=model, freq=1400 * u.MHz, obs="ao"
-        )
-        + simulation.make_fake_toas_uniform(
+        ),
+        simulation.make_fake_toas_uniform(
             55010, 55500, 40, model=model, freq=800 * u.MHz, obs="gbt"
-        )
-        + simulation.make_fake_toas_uniform(
+        ),
+        simulation.make_fake_toas_uniform(
             55020, 55500, 50, model=model, freq=2000 * u.MHz, obs="@"
-        )
-    )
+        ),
+    ]
+    f = io.StringIO()
+    for t in fakes:
+        t.write_TOA_file(f)
+    f.seek(0)
+    t = toa.get_TOAs(f)
 
     r = pint.residuals.Residuals(t, model, subtract_mean=False)
 
     @classmethod
@@ -84,11 +95,6 @@ def test_resorting_toas_chi2_match(sortkey):
 
 
 class TOALineOrderSetup:
-    shuffletoas = """FORMAT 1
-    test 1234.0 54321 0 pks
-    test2 888 59055 0 meerkat
-    test3 350 59000 0 gbt
-    """
     timfile = io.StringIO(shuffletoas)
     t = toa.get_TOAs(timfile)
     timfile.seek(0)
diff --git a/tox.ini b/tox.ini
index 3b714a73f..87a498e01 100644
--- a/tox.ini
+++ b/tox.ini
@@ -18,8 +18,7 @@ envlist =
 skip_missing_interpreters = True
 
 [tool:pytest]
-# pytest docs seem to say that this section should be called just pytest,
-# not tool:pytest; is it working?
+# pytest docs seem to say that this section should be called just pytest, not tool:pytest; is it working?
 testpaths = tests
 addopts = --cov-report=term-missing
 
@@ -34,8 +33,6 @@ passenv =
 
 deps =
     pytest
-    pytest-xdist
-    pytest-rerunfailures
     cov: coverage
     cov: pytest-cov
    cov: pytest-remotedata
@@ -45,9 +42,8 @@ deps =
     setuptools
 commands =
     pip freeze
-    !cov: pytest --reruns 5 --verbose tests/test_toa_selection.py
-    !cov: pytest -n 6 --reruns 5 --verbose --ignore=tests/test_toa_selection.py
-    cov: pytest -n 6 --reruns 5 --verbose --pyargs tests --cov=pint --cov-config={toxinidir}/.coveragerc {posargs}
+    !cov: pytest
+    cov: pytest -v --pyargs tests --cov=pint --cov-config={toxinidir}/.coveragerc {posargs}
     cov: coverage xml -o {toxinidir}/coverage.xml
 
 depends =
@@ -74,13 +70,9 @@ deps =
     matplotlib==3.2.0
     scipy==1.4.1
     pytest
-    pytest-xdist
-    pytest-rerunfailures
     coverage
     hypothesis<=6.72.0
-commands =
-    pytest -n 6 --reruns 5 --verbose
-
+commands = {posargs:pytest}
 
 [testenv:report]
 skip_install = true