From a506e9a2d8658c8670d56aaed125cb210767e19c Mon Sep 17 00:00:00 2001
From: Chase-Grajeda
Date: Thu, 13 Jun 2024 17:11:22 -0400
Subject: [PATCH 1/4] First additions

Added __init__.py for test module. Added test_configurators.py. Added basic
fixtures and construction tests.
---
 tests/test_configurators/__init__.py           |  0
 .../test_configurators/test_configurators.py   | 79 +++++++++++++++++++
 2 files changed, 79 insertions(+)
 create mode 100644 tests/test_configurators/__init__.py
 create mode 100644 tests/test_configurators/test_configurators.py

diff --git a/tests/test_configurators/__init__.py b/tests/test_configurators/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/test_configurators/test_configurators.py b/tests/test_configurators/test_configurators.py
new file mode 100644
index 00000000..8f97fe07
--- /dev/null
+++ b/tests/test_configurators/test_configurators.py
@@ -0,0 +1,79 @@
+import keras
+import pytest
+
+from bayesflow.experimental.configurators import Configurator
+
+"""
+TODO: with / without conditions
+
+TODO: with / without summary network inputs and outputs
+
+TODO: for data with any number of data dimensions
+
+TODO: (optional) for data with any number of batch dimensions
+"""
+
+@pytest.fixture()
+def random_data():
+    return {
+        'var1': keras.random.normal((5,5)),
+        'var2': keras.random.normal((5,5)),
+        'var3': keras.random.normal((5,5)),
+        'summary_outputs': keras.random.normal((5,5)),
+    }
+
+@pytest.fixture()
+def test_params():
+    return {
+        'inference_variables': ["var1"],
+        'inference_conditions': ["var2, var3"],
+        'summary_variables': ["var1"],
+        'summary_conditions': ["var2"]
+    }
+
+@pytest.fixture()
+def configurator(test_params):
+    return Configurator(
+        inference_variables=test_params['inference_variables'],
+        inference_conditions=test_params['inference_conditions'],
+        summary_variables=test_params['summary_variables'],
+        summary_conditions=test_params['summary_conditions']
+    )
+
+@pytest.fixture()
+def configurator_sparse(test_params):
+    return Configurator(
+        inference_variables=test_params['inference_variables'],
+    )
+
+
+# Test for correct construction of Configurator with all args
+def test_configurator_init(test_params, configurator: Configurator):
+    config = configurator
+    assert config.inference_variables == test_params['inference_variables']
+    assert config.inference_conditions == test_params['inference_conditions']
+    assert config.summary_variables == test_params['summary_variables']
+    assert config.summary_conditions == test_params['summary_conditions']
+
+
+# Test for correct construction of Configurator with only inference_vars
+def test_sparse_configurator_init(test_params, configurator_sparse: Configurator):
+    config = configurator_sparse
+    assert config.inference_variables == test_params['inference_variables']
+    assert config.inference_conditions == []
+    assert config.summary_variables == []
+    assert config.summary_conditions == []
+
+# TODO: Test successful configure_inference_variables (check shapes)
+
+# TODO: Test successful configure_inference_conditions w/o summary_outputs in either
+
+# TODO: Test successful configure_inference_conditions w/ summary_outputs in data, not in keys
+
+# TODO: Test successful configure_inference_conditions w/ summary_outputs in both
+
+# TODO: Test successful configure_summary_variables
+
+# TODO: Test successful configure_summary_conditions
+
+# TODO: Test for None return when keys == None for all params
\ No newline at end of file

From 867bca508b60e720f05632592b2c78d6eb3d02a0 Mon Sep 17 00:00:00 2001
From: Chase-Grajeda
Date: Thu, 13 Jun 2024 17:57:58 -0400
Subject: [PATCH 2/4] Remaining tests

Added remaining unit tests
---
 .../test_configurators/test_configurators.py | 105 ++++++++++++++----
 1 file changed, 86 insertions(+), 19 deletions(-)

diff --git a/tests/test_configurators/test_configurators.py b/tests/test_configurators/test_configurators.py
index 8f97fe07..366ed9dc 100644
--- a/tests/test_configurators/test_configurators.py
+++ b/tests/test_configurators/test_configurators.py
@@ -3,34 +3,51 @@
 
 from bayesflow.experimental.configurators import Configurator
 
-"""
-TODO: with / without conditions
 
-TODO: with / without summary network inputs and outputs
+@pytest.fixture()
+def test_shape():
+    return (5,7)
+
 
-TODO: for data with any number of data dimensions
+@pytest.fixture()
+def random_data(test_shape):
+    return {
+        'var1': keras.random.normal(test_shape),
+        'var2': keras.random.normal(test_shape),
+        'var3': keras.random.normal(test_shape),
+        'summary_outputs': keras.random.normal(test_shape),
+    }
 
-TODO: (optional) for data with any number of batch dimensions
-"""
 
 @pytest.fixture()
-def random_data():
+def random_data_no_output(test_shape):
     return {
-        'var1': keras.random.normal((5,5)),
-        'var2': keras.random.normal((5,5)),
-        'var3': keras.random.normal((5,5)),
-        'summary_outputs': keras.random.normal((5,5)),
+        'var1': keras.random.normal(test_shape),
+        'var2': keras.random.normal(test_shape),
+        'var3': keras.random.normal(test_shape),
     }
 
+
 @pytest.fixture()
 def test_params():
     return {
         'inference_variables': ["var1"],
-        'inference_conditions': ["var2, var3"],
+        'inference_conditions': ["var2", "var3", "summary_outputs"],
         'summary_variables': ["var1"],
         'summary_conditions': ["var2"]
     }
 
+
+@pytest.fixture()
+def test_params_no_output():
+    return {
+        'inference_variables': ["var1"],
+        'inference_conditions': ["var2", "var3"],
+        'summary_variables': ["var1"],
+        'summary_conditions': ["var2"]
+    }
+
+
 @pytest.fixture()
 def configurator(test_params):
     return Configurator(
@@ -40,6 +57,17 @@ def configurator(test_params):
         summary_conditions=test_params['summary_conditions']
     )
 
+
+@pytest.fixture()
+def configurator_no_output(test_params_no_output):
+    return Configurator(
+        inference_variables=test_params_no_output['inference_variables'],
+        inference_conditions=test_params_no_output['inference_conditions'],
+        summary_variables=test_params_no_output['summary_variables'],
+        summary_conditions=test_params_no_output['summary_conditions']
+    )
+
+
 @pytest.fixture()
 def configurator_sparse(test_params):
     return Configurator(
@@ -64,16 +92,55 @@ def test_sparse_configurator_init(test_params, configurator_sparse: Configurator
     assert config.summary_variables == []
     assert config.summary_conditions == []
 
-# TODO: Test successful configure_inference_variables (check shapes)
 
-# TODO: Test successful configure_inference_conditions w/o summary_outputs in either
+# Test successful configure_inference_variables()
+def test_inference_vars_filter(random_data, configurator: Configurator, test_shape):
+    config = configurator
+    filtered_data = config.configure_inference_variables(random_data)
+    assert filtered_data.shape == test_shape
+
+
+# Test successful configure_inference_conditions w/o summary_outputs in either
+def test_inferences_conds_filter_no_outputs(random_data_no_output, configurator_no_output: Configurator, test_shape):
+    config = configurator_no_output
+    filtered_data = config.configure_inference_conditions(random_data_no_output)
+    assert filtered_data.shape == (test_shape[0], test_shape[1] * 2)
+
 
-# TODO: Test successful configure_inference_conditions w/ summary_outputs in data, not in keys
+# Test successful configure_inference_conditions w/ summary_outputs in data, not in keys
+def test_inferences_conds_filter_partial_outputs(random_data, configurator_no_output: Configurator, test_shape):
+    config = configurator_no_output
+    filtered_data = config.configure_inference_conditions(random_data)
+    assert filtered_data.shape == (test_shape[0], test_shape[1] * 3)
 
-# TODO: Test successful configure_inference_conditions w/ summary_outputs in both
 
-# TODO: Test successful configure_summary_variables
+# Test successful configure_inference_conditions w/ summary_outputs in both
+def test_inferences_conds_filter_with_outputs(random_data, configurator: Configurator, test_shape):
+    config = configurator
+    filtered_data = config.configure_inference_conditions(random_data)
+    assert filtered_data.shape == (test_shape[0], test_shape[1] * 3)
+
+
+# Test successful configure_summary_variables()
+def test_summary_vars_filter(random_data, configurator: Configurator, test_shape):
+    config = configurator
+    filtered_data = config.configure_summary_variables(random_data)
+    assert filtered_data.shape == test_shape
 
-# TODO: Test successful configure_summary_conditions
 
-# TODO: Test for None return when keys == None for all params
\ No newline at end of file
+# Test successful configure_summary_conditions()
+def test_summary_conds_filter(random_data, configurator: Configurator, test_shape):
+    config = configurator
+    filtered_data = config.configure_summary_conditions(random_data)
+    assert filtered_data.shape == test_shape
+
+
+# Test return None for filters when configuring sparse Configurator
+def test_null_vars_and_conds(random_data_no_output, configurator_sparse: Configurator):
+    config = configurator_sparse
+    filtered_inference_conds = config.configure_inference_conditions(random_data_no_output)
+    filtered_summary_vars = config.configure_summary_variables(random_data_no_output)
+    filtered_summary_conds = config.configure_summary_conditions(random_data_no_output)
+    assert filtered_inference_conds == None
+    assert filtered_summary_vars == None
+    assert filtered_summary_conds == None
\ No newline at end of file

From a1549118d3ecb77672ae3ab63ae712e9a6de3fee Mon Sep 17 00:00:00 2001
From: Chase-Grajeda
Date: Fri, 14 Jun 2024 09:16:19 -0400
Subject: [PATCH 3/4] Added conftest

Separated fixtures and placed them in conftest.py
---
 tests/test_configurators/conftest.py          | 74 +++++++++++++++++++
 .../test_configurators/test_configurators.py  | 72 ------------------
 2 files changed, 74 insertions(+), 72 deletions(-)
 create mode 100644 tests/test_configurators/conftest.py

diff --git a/tests/test_configurators/conftest.py b/tests/test_configurators/conftest.py
new file mode 100644
index 00000000..e1e87301
--- /dev/null
+++ b/tests/test_configurators/conftest.py
@@ -0,0 +1,74 @@
+import keras
+import pytest
+from bayesflow.experimental.configurators import Configurator
+
+
+@pytest.fixture()
+def test_shape():
+    return (5,7)
+
+
+@pytest.fixture()
+def random_data(test_shape):
+    return {
+        'var1': keras.random.normal(test_shape),
+        'var2': keras.random.normal(test_shape),
+        'var3': keras.random.normal(test_shape),
+        'summary_outputs': keras.random.normal(test_shape),
+    }
+
+
+@pytest.fixture()
+def random_data_no_output(test_shape):
+    return {
+        'var1': keras.random.normal(test_shape),
+        'var2': keras.random.normal(test_shape),
+        'var3': keras.random.normal(test_shape),
+    }
+
+
+@pytest.fixture()
+def test_params():
+    return {
+        'inference_variables': ["var1"],
+        'inference_conditions': ["var2", "var3", "summary_outputs"],
+        'summary_variables': ["var1"],
+        'summary_conditions': ["var2"]
+    }
+
+
+@pytest.fixture()
+def test_params_no_output():
+    return {
+        'inference_variables': ["var1"],
+        'inference_conditions': ["var2", "var3"],
+        'summary_variables': ["var1"],
+        'summary_conditions': ["var2"]
+    }
+
+
+@pytest.fixture()
+def configurator(test_params):
+    return Configurator(
+        inference_variables=test_params['inference_variables'],
+        inference_conditions=test_params['inference_conditions'],
+        summary_variables=test_params['summary_variables'],
+        summary_conditions=test_params['summary_conditions']
+    )
+
+
+@pytest.fixture()
+def configurator_no_output(test_params_no_output):
+    return Configurator(
+        inference_variables=test_params_no_output['inference_variables'],
+        inference_conditions=test_params_no_output['inference_conditions'],
+        summary_variables=test_params_no_output['summary_variables'],
+        summary_conditions=test_params_no_output['summary_conditions']
+    )
+
+
+@pytest.fixture()
+def configurator_sparse(test_params):
+    return Configurator(
+        inference_variables=test_params['inference_variables'],
+    )
\ No newline at end of file
diff --git a/tests/test_configurators/test_configurators.py b/tests/test_configurators/test_configurators.py
index 366ed9dc..74d8b459 100644
--- a/tests/test_configurators/test_configurators.py
+++ b/tests/test_configurators/test_configurators.py
@@ -1,80 +1,8 @@
 import keras
 import pytest
-
 from bayesflow.experimental.configurators import Configurator
 
 
-@pytest.fixture()
-def test_shape():
-    return (5,7)
-
-
-@pytest.fixture()
-def random_data(test_shape):
-    return {
-        'var1': keras.random.normal(test_shape),
-        'var2': keras.random.normal(test_shape),
-        'var3': keras.random.normal(test_shape),
-        'summary_outputs': keras.random.normal(test_shape),
-    }
-
-
-@pytest.fixture()
-def random_data_no_output(test_shape):
-    return {
-        'var1': keras.random.normal(test_shape),
-        'var2': keras.random.normal(test_shape),
-        'var3': keras.random.normal(test_shape),
-    }
-
-
-@pytest.fixture()
-def test_params():
-    return {
-        'inference_variables': ["var1"],
-        'inference_conditions': ["var2", "var3", "summary_outputs"],
-        'summary_variables': ["var1"],
-        'summary_conditions': ["var2"]
-    }
-
-
-@pytest.fixture()
-def test_params_no_output():
-    return {
-        'inference_variables': ["var1"],
-        'inference_conditions': ["var2", "var3"],
-        'summary_variables': ["var1"],
-        'summary_conditions': ["var2"]
-    }
-
-
-@pytest.fixture()
-def configurator(test_params):
-    return Configurator(
-        inference_variables=test_params['inference_variables'],
-        inference_conditions=test_params['inference_conditions'],
-        summary_variables=test_params['summary_variables'],
-        summary_conditions=test_params['summary_conditions']
-    )
-
-
-@pytest.fixture()
-def configurator_no_output(test_params_no_output):
-    return Configurator(
-        inference_variables=test_params_no_output['inference_variables'],
-        inference_conditions=test_params_no_output['inference_conditions'],
-        summary_variables=test_params_no_output['summary_variables'],
-        summary_conditions=test_params_no_output['summary_conditions']
-    )
-
-
-@pytest.fixture()
-def configurator_sparse(test_params):
-    return Configurator(
-        inference_variables=test_params['inference_variables'],
-    )
-
-
 # Test for correct construction of Configurator with all args
 def test_configurator_init(test_params, configurator: Configurator):
     config = configurator
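
A note on the layout introduced by PATCH 3: pytest discovers fixtures defined in a conftest.py automatically for every test module in the same directory, which is why test_configurators.py no longer defines or imports them. A minimal sketch for running only this test module — assuming pytest is installed and the interpreter is started from the repository root, neither of which is shown in the patches — is:

# Sketch only: run the configurator tests with verbose output.
# Equivalent to the command line `pytest tests/test_configurators -v`;
# the path is inferred from the file layout in these patches.
import sys

import pytest

sys.exit(pytest.main(["tests/test_configurators", "-v"]))
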
From b2922cae353e0194b7dd89c7ac4d8e85873f5dfe Mon Sep 17 00:00:00 2001
From: Chase-Grajeda
Date: Tue, 18 Jun 2024 08:37:16 -0400
Subject: [PATCH 4/4] Added requested changes

Added batch_size, set_size, and num_features parameterizations in conftest.py.
Combined repetitive fixtures in conftest.py. Combined repetitive tests in
test_configurators.py. Parameterized Configurator initialization in
conftest.py. Parameterized parameter selection in conftest.py. Removed
initialization tests in test_configurators.py. Added summary_inputs and
summary_conditions to parameters. Changed instances of '==None' to 'is None'.
Removed 'config=Configurator' instances in test_configurators.py.
---
 tests/test_configurators/conftest.py          |  99 +++++++--------
 .../test_configurators/test_configurators.py  | 116 +++++++-----------
 2 files changed, 87 insertions(+), 128 deletions(-)

diff --git a/tests/test_configurators/conftest.py b/tests/test_configurators/conftest.py
index e1e87301..17f82fcc 100644
--- a/tests/test_configurators/conftest.py
+++ b/tests/test_configurators/conftest.py
@@ -3,72 +3,57 @@
 from bayesflow.experimental.configurators import Configurator
 
 
-@pytest.fixture()
-def test_shape():
-    return (5,7)
+@pytest.fixture(params=[2, 3])
+def batch_size(request):
+    return request.param
 
 
-@pytest.fixture()
-def random_data(test_shape):
-    return {
-        'var1': keras.random.normal(test_shape),
-        'var2': keras.random.normal(test_shape),
-        'var3': keras.random.normal(test_shape),
-        'summary_outputs': keras.random.normal(test_shape),
-    }
+@pytest.fixture(params=[2, 3])
+def set_size(request):
+    return request.param
 
 
-@pytest.fixture()
-def random_data_no_output(test_shape):
-    return {
-        'var1': keras.random.normal(test_shape),
-        'var2': keras.random.normal(test_shape),
-        'var3': keras.random.normal(test_shape),
-    }
+@pytest.fixture(params=[2,3])
+def num_features(request):
+    return request.param
 
 
-@pytest.fixture()
-def test_params():
-    return {
-        'inference_variables': ["var1"],
-        'inference_conditions': ["var2", "var3", "summary_outputs"],
-        'summary_variables': ["var1"],
-        'summary_conditions': ["var2"]
+@pytest.fixture(params=[True, False])
+def random_data(request, batch_size, set_size, num_features):
+    data = {
+        "var1": keras.random.normal((batch_size, set_size, num_features)),
+        "var2": keras.random.normal((batch_size, set_size, num_features)),
+        "var3": keras.random.normal((batch_size, set_size, num_features)),
+        "summary_inputs": keras.random.normal((batch_size, set_size, num_features)),
+        "summary_conditions": keras.random.normal((batch_size, set_size, num_features))
     }
-
-
-@pytest.fixture()
-def test_params_no_output():
-    return {
-        'inference_variables': ["var1"],
-        'inference_conditions': ["var2", "var3"],
-        'summary_variables': ["var1"],
-        'summary_conditions': ["var2"]
+    if request.param:
+        data["summary_outputs"] = keras.random.normal((batch_size, set_size, num_features))
+    return data
+
+
+@pytest.fixture(params=[True, False])
+def test_params(request):
+    args = {
+        "inference_variables": ["var1"],
+        "inference_conditions": ["var2", "var3"],
+        "summary_variables": ["var1"],
+        "summary_conditions": ["var2"]
     }
+    if request.param:
+        args["inference_conditions"].append("summary_outputs")
+    return args
 
 
-@pytest.fixture()
-def configurator(test_params):
-    return Configurator(
-        inference_variables=test_params['inference_variables'],
-        inference_conditions=test_params['inference_conditions'],
-        summary_variables=test_params['summary_variables'],
-        summary_conditions=test_params['summary_conditions']
-    )
-
-
-@pytest.fixture()
-def configurator_no_output(test_params_no_output):
-    return Configurator(
-        inference_variables=test_params_no_output['inference_variables'],
-        inference_conditions=test_params_no_output['inference_conditions'],
-        summary_variables=test_params_no_output['summary_variables'],
-        summary_conditions=test_params_no_output['summary_conditions']
-    )
-
-
-@pytest.fixture()
-def configurator_sparse(test_params):
+@pytest.fixture(params=[True, False])
+def configurator(request, test_params):
+    if request.param:
+        return Configurator(
+            inference_variables=test_params["inference_variables"]
+        )
     return Configurator(
-        inference_variables=test_params['inference_variables'],
+        inference_variables=test_params["inference_variables"],
+        inference_conditions=test_params["inference_conditions"],
+        summary_variables=test_params["summary_variables"],
+        summary_conditions=test_params["summary_conditions"]
     )
\ No newline at end of file
diff --git a/tests/test_configurators/test_configurators.py b/tests/test_configurators/test_configurators.py
index 74d8b459..98cc2af6 100644
--- a/tests/test_configurators/test_configurators.py
+++ b/tests/test_configurators/test_configurators.py
@@ -1,74 +1,48 @@
-import keras
+from keras import ops
 import pytest
-from bayesflow.experimental.configurators import Configurator
 
 
-# Test for correct construction of Configurator with all args
-def test_configurator_init(test_params, configurator: Configurator):
-    config = configurator
-    assert config.inference_variables == test_params['inference_variables']
-    assert config.inference_conditions == test_params['inference_conditions']
-    assert config.summary_variables == test_params['summary_variables']
-    assert config.summary_conditions == test_params['summary_conditions']
-
-
-# Test for correct construction of Configurator with only inference_vars
-def test_sparse_configurator_init(test_params, configurator_sparse: Configurator):
-    config = configurator_sparse
-    assert config.inference_variables == test_params['inference_variables']
-    assert config.inference_conditions == []
-    assert config.summary_variables == []
-    assert config.summary_conditions == []
-
-
-# Test successful configure_inference_variables()
-def test_inference_vars_filter(random_data, configurator: Configurator, test_shape):
-    config = configurator
-    filtered_data = config.configure_inference_variables(random_data)
-    assert filtered_data.shape == test_shape
-
-
-# Test successful configure_inference_conditions w/o summary_outputs in either
-def test_inferences_conds_filter_no_outputs(random_data_no_output, configurator_no_output: Configurator, test_shape):
-    config = configurator_no_output
-    filtered_data = config.configure_inference_conditions(random_data_no_output)
-    assert filtered_data.shape == (test_shape[0], test_shape[1] * 2)
-
-
-# Test successful configure_inference_conditions w/ summary_outputs in data, not in keys
-def test_inferences_conds_filter_partial_outputs(random_data, configurator_no_output: Configurator, test_shape):
-    config = configurator_no_output
-    filtered_data = config.configure_inference_conditions(random_data)
-    assert filtered_data.shape == (test_shape[0], test_shape[1] * 3)
-
-
-# Test successful configure_inference_conditions w/ summary_outputs in both
-def test_inferences_conds_filter_with_outputs(random_data, configurator: Configurator, test_shape):
-    config = configurator
-    filtered_data = config.configure_inference_conditions(random_data)
-    assert filtered_data.shape == (test_shape[0], test_shape[1] * 3)
-
-
-# Test successful configure_summary_variables()
-def test_summary_vars_filter(random_data, configurator: Configurator, test_shape):
-    config = configurator
-    filtered_data = config.configure_summary_variables(random_data)
-    assert filtered_data.shape == test_shape
-
-
-# Test successful configure_summary_conditions()
-def test_summary_conds_filter(random_data, configurator: Configurator, test_shape):
-    config = configurator
-    filtered_data = config.configure_summary_conditions(random_data)
-    assert filtered_data.shape == test_shape
-
-
-# Test return None for filters when configuring sparse Configurator
-def test_null_vars_and_conds(random_data_no_output, configurator_sparse: Configurator):
-    config = configurator_sparse
-    filtered_inference_conds = config.configure_inference_conditions(random_data_no_output)
-    filtered_summary_vars = config.configure_summary_variables(random_data_no_output)
-    filtered_summary_conds = config.configure_summary_conditions(random_data_no_output)
-    assert filtered_inference_conds == None
-    assert filtered_summary_vars == None
-    assert filtered_summary_conds == None
\ No newline at end of file
+def test_inference_vars_filter(random_data, configurator):
+    # Tests for correct output shape when querying inference variables
+    filtered_data = configurator.configure_inference_variables(random_data)
+    expected = ops.concatenate([random_data[v] for v in configurator.inference_variables], axis=-1)
+    assert filtered_data.shape == expected.shape
+
+
+def test_inferences_conds_filter(random_data, configurator):
+    # Tests for correct output shape when querying inference conditions w.r.t. summary_outputs
+    if not configurator.inference_conditions:
+        if "summary_outputs" in random_data:
+            assert configurator.configure_inference_conditions(random_data).shape == random_data["summary_outputs"].shape
+        else:
+            assert configurator.configure_inference_conditions(random_data) is None
+    elif not "summary_outputs" in random_data and "summary_outputs" in configurator.inference_conditions:
+        with pytest.raises(KeyError):
+            filtered_data = configurator.configure_inference_conditions(random_data)
+    else:
+        filtered_data = configurator.configure_inference_conditions(random_data)
+        tensors = [random_data[v] for v in configurator.inference_conditions]
+        if "summary_outputs" in random_data and not "summary_outputs" in configurator.inference_conditions:
+            tensors.append(random_data["summary_outputs"])
+        expected = ops.concatenate(tensors, axis=-1)
+        assert filtered_data.shape == expected.shape
+
+
+def test_summary_vars_filter(random_data, configurator):
+    # Tests for correct output shape when querying summary variables
+    if not configurator.summary_variables:
+        assert configurator.configure_summary_variables(random_data) is None
+    else:
+        filtered_data = configurator.configure_summary_variables(random_data)
+        expected = ops.concatenate([random_data[v] for v in configurator.summary_variables], axis=-1)
+        assert filtered_data.shape == expected.shape
+
+
+def test_summary_conds_filter(random_data, configurator):
+    # Tests for correct output shape when querying summary conditions
+    if not configurator.summary_conditions:
+        assert configurator.configure_summary_conditions(random_data) is None
+    else:
+        filtered_data = configurator.configure_summary_conditions(random_data)
+        expected = ops.concatenate([random_data[v] for v in configurator.summary_conditions], axis=-1)
+        assert filtered_data.shape == expected.shape
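
The tests in PATCH 4 pin down the Configurator behavior they rely on: each configure_* call gathers the listed keys from the data dictionary and concatenates them along the last axis, configure_inference_conditions additionally picks up "summary_outputs" when it is present in the data but not listed, and an empty key list yields None. For readers without the BayesFlow source at hand, a minimal stand-in consistent with those assertions could look like the sketch below. It is inferred from the tests only — this is not the actual bayesflow.experimental.configurators.Configurator implementation, and everything beyond the class and method names used above is an assumption.

from keras import ops


class StubConfigurator:
    """Hypothetical stand-in mirroring only the behavior asserted by the tests above."""

    def __init__(self, inference_variables, inference_conditions=None,
                 summary_variables=None, summary_conditions=None):
        self.inference_variables = inference_variables
        self.inference_conditions = inference_conditions or []
        self.summary_variables = summary_variables or []
        self.summary_conditions = summary_conditions or []

    @staticmethod
    def _gather(data, keys):
        # Concatenate the requested keys along the last (feature) axis; None if nothing is requested.
        if not keys:
            return None
        return ops.concatenate([data[key] for key in keys], axis=-1)

    def configure_inference_variables(self, data):
        return self._gather(data, self.inference_variables)

    def configure_inference_conditions(self, data):
        keys = list(self.inference_conditions)
        # The tests expect summary_outputs to be appended automatically when it is
        # present in the data but not explicitly listed as a condition.
        if "summary_outputs" in data and "summary_outputs" not in keys:
            keys.append("summary_outputs")
        return self._gather(data, keys)

    def configure_summary_variables(self, data):
        return self._gather(data, self.summary_variables)

    def configure_summary_conditions(self, data):
        return self._gather(data, self.summary_conditions)

With this reading, the KeyError exercised via pytest.raises(KeyError) above simply falls out of the plain data[key] lookup when a listed condition is missing from the data.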