diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 3bcbdf0fa..231e86471 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -8,16 +8,16 @@ ### Testing
+
### Deployment Plan (For developer use)
_How does the changes affect the product?_
- [ ] Code only?
+- [ ] If applicable, has a deployment plan been created with the deployment person/team?
- [ ] Require new or adjusted data inputs? Does it have start, end and duration code (in UTC)?
- [ ] If new or updated data sets, has the FIM code been updated and tested with the new/adjusted data (subset is fine, but must be a subset of the new data)?
- [ ] Require new pre-clip set?
- [ ] Has new or updated python packages?
-- [ ] If applicable, has a deployment plan be created with the deployment person/team?
-
### Issuer Checklist (For developer use)
@@ -30,11 +30,9 @@ _You may update this checklist before and/or after creating the PR. If you're un
- [ ] The feature branch you're submitting as a PR is up to date (merged) with the latest `dev` branch
- [ ] `pre-commit` hooks were run locally
- [ ] Any _change_ in functionality is tested
-- [ ] Passes all unit tests locally (inside interactive Docker container, at `/foss_fim/`, run: `pytest unit_tests/`)
- [ ] New functions are documented (with a description, list of inputs, and expected output)
- [ ] Placeholder code is flagged / future todos are captured in comments
- [ ] [CHANGELOG](/docs/CHANGELOG.md) updated with template version number, e.g. `4.x.x.x`
-- [ ] [Reviewers requested](https://help.github.com/articles/requesting-a-pull-request-review/)
- [ ] Add yourself as an [assignee](https://docs.github.com/en/issues/tracking-your-work-with-issues/assigning-issues-and-pull-requests-to-other-github-users) in the PR as well as the FIM Technical Lead

### Merge Checklist (For Technical Lead use only)
diff --git a/.gitignore b/.gitignore
index 7ce22521c..d5643ae0e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,5 +13,4 @@ config/**
config/*.swp
.vscode/
**/.DS_Store
-**/*_pytest.py
.private/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3e114abed..cd75f9d20 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -25,8 +25,7 @@ guidance below.
## Changing the code-base

Generally speaking, you should fork this repository, make changes in your
-own fork, and then submit a pull request. All new code should have associated
-unit tests (added to `/unit_tests`) that validate implemented features and the presence or lack of defects.
+own fork, and then submit a pull request.

Additionally, the code should follow any stylistic and architectural guidelines prescribed by the project. In the absence of such guidelines, mimic the styles and patterns in the existing code-base.
@@ -118,19 +117,14 @@ If you would like to contribute, please follow these steps:
# optionally close the container
# Back on your terminal console (outside the container), use the typical git add, git commit, git push
-
-8. [Within the container](README.md#startrun-the-docker-container), ensure sure unit tests pass ([instructions here](/unit_tests/README.md)).
-   ```
-   pytest unit_tests/
-   ```
-9. Outside of the Docker container, commit your changes:
+8. Outside of the Docker container, commit your changes:
   ```
   git commit -m "<...>"
   ```
   This will invoke pre-commit hooks mentioned in step 6 that will lint & format the code (some others as well). In many cases non-compliant code will be rectified automatically, but in some cases manual changes will be necessary.
Make sure all of these checks pass. If not, make necessary changes (`git add <...>`), and re-issue `git commit -m "<...>"`.
-10. Push to your forked branch:
+9. Push to your forked branch:
   ```
   git push -u origin <...>
   ```
diff --git a/Dockerfile b/Dockerfile
index db1ecf72b..6b65469a6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -72,9 +72,17 @@ COPY --from=builder $depDir $depDir
# remove reference to missing repo
RUN rm /etc/apt/sources.list.d/apache-arrow.sources
-RUN apt-get update --fix-missing && apt-get install -y openjdk-19-jdk && rm -rf /var/lib/apt/lists/*
-
+RUN apt-get update --fix-missing && apt-get install -y openjdk-21-jdk && rm -rf /var/lib/apt/lists/*
RUN apt update --fix-missing
+
+# An older version of openjdk still exists on the file system but was never cleaned up
+# After research, we realized it just needs file cleanup; leaving it there triggers security warnings
+# RUN apt-get remove -y openjdk-17-jdk (not installed, just residue left)
+RUN rm -rf ./usr/lib/jvm/*java-1.17* && \
+    rm -rf ./usr/lib/jvm/.java-1.17* && \
+    rm -rdf ./usr/lib/jvm/java-17*
+
+
RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt install -y p7zip-full python3-pip time mpich parallel libgeos-dev expect tmux rsync tzdata

RUN apt auto-remove
@@ -91,7 +99,9 @@ ENV PYTHONUNBUFFERED=TRUE

## ADD TO PATHS ##
ENV PATH="$projectDir:${PATH}"
-ENV PYTHONPATH=${PYTHONPATH}:$srcDir:$projectDir/unit_tests:$projectDir/tools
+#ENV PATH=${PATH}:$projectDir:$projectDir/$srcDir:$projectDir/tools
+# Jul 17, 2024: Even though PYTHONPATH isn't used, it still seems to want it.
+ENV PYTHONPATH=${PYTHONPATH}:$srcDir:$projectDir/tools

## install python 3 modules ##
@@ -110,6 +120,7 @@ RUN pip3 install pipenv==2023.12.1 && PIP_NO_CACHE_DIR=off pipenv install --syst
# We download and unzip it to the same file folder that pip deployed the whitebox library.
# Whitebox also attempts to always download a folder called testdata regardless of use.
# We added an empty folder to fake out whitebox_tools.py so it doesn't try to download the folder + # RUN wbox_path=/usr/local/lib/python3.10/dist-packages/whitebox/WBT && \ # wget -P $wbox_path https://www.whiteboxgeo.com/WBT_Linux/WhiteboxTools_linux_musl.zip && \ # unzip -o $wbox_path/WhiteboxTools_linux_musl.zip -d $wbox_path && \ diff --git a/Pipfile b/Pipfile index 6ba0dc993..9290a1e29 100644 --- a/Pipfile +++ b/Pipfile @@ -7,11 +7,11 @@ verify_ssl = true ipython = "==8.24.0" [packages] -certifi = "==2023.7.22" +certifi = "==2024.7.4" fiona = "==1.8.22" geopandas = "==0.14.3" -numba = "==0.56.4" -numpy = "==1.23.5" +numba = "==0.60.0" +numpy = "==1.26.4" pandas = "==2.0.2" rasterio = "==1.3.6" rasterstats = "==0.18.0" @@ -20,7 +20,7 @@ tqdm = "==4.66.3" seaborn = "==0.12.2" python-dotenv = "==1.0.0" natsort = "==8.3.1" -xarray = "==2023.1.0" +xarray = "==2024.6.0" netcdf4 = "==1.6.3" tables = "==3.8.0" pyproj = "==3.5.0" @@ -29,20 +29,19 @@ boto3 = "==1.26.109" jupyter = "==1.0.0" jupyterlab = "==3.6.7" ipympl = "==0.9.3" -pytest = "==7.3.0" -whitebox = "2.3.4" +whitebox = "==2.3.4" shapely = "==2.0.1" pyarrow = "==14.0.1" rtree = "==1.0.1" py7zr = "==0.20.4" -scipy = "==1.10.1" -gval = "==0.2.3" +scipy = "==1.14.0" +gval = "==0.2.7" flake8 = "==6.0.0" black = "==24.3.0" flake8-pyproject = "==1.2.3" pre-commit = "==3.3.3" isort = "==5.12.0" -urllib3 = "==1.26.18" +urllib3 = "==1.26.19" pyflwdir = "==0.5.8" pillow = "==10.3.0" pyogrio = "==0.7.2" @@ -52,6 +51,9 @@ lmoments3 = "==1.0.6" zarr = "==2.18.0" requests = "==2.32.3" aiohttp = "==3.9.5" +distributed = "==2024.6.2" +monaco = "==0.13.1" +dask = "==2024.6.2" [requires] python_version = "3.10" diff --git a/Pipfile.lock b/Pipfile.lock index 3c1bc0455..ed11ccabf 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "898973145b92f3e3eb9afcaf3cee8bd5da693fbd8ae06046e66d3a73022c27a5" + "sha256": "75c63212784d57d42fc7b5536a4634408876142a2abc57317537c8e7ef4abab5" }, "pipfile-spec": 6, "requires": { @@ -24,6 +24,14 @@ "markers": "python_version >= '3.7'", "version": "==2.4.0" }, + "aiobotocore": { + "hashes": [ + "sha256:337429ffd3cc367532572d40be809a84c7b5335f3f8eca2f23e09dfaa9a9ef90", + "sha256:e7399f21570db1c287f1c0c814dd3475dfe1c8166722e2c77ce67f172cbcfa89" + ], + "markers": "python_version >= '3.7'", + "version": "==2.5.2" + }, "aiofiles": { "hashes": [ "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad", @@ -115,6 +123,14 @@ "markers": "python_version >= '3.8'", "version": "==3.9.5" }, + "aioitertools": { + "hashes": [ + "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394", + "sha256:42c68b8dd3a69c2bf7f2233bf7df4bb58b557bca5252ac02ed5187bbc67d6831" + ], + "markers": "python_version >= '3.6'", + "version": "==0.11.0" + }, "aiosignal": { "hashes": [ "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", @@ -309,11 +325,58 @@ }, "botocore": { "hashes": [ - "sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df", - "sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210" + "sha256:a50edd715eb510343e27849f36483804aae4b871590db4d4996aa53368dcac40", + "sha256:b906999dd53dda2ef0ef6f7f55fcc81a4b06b9f1c8a9f65c546e0b981f959f5f" ], "markers": "python_version >= '3.7'", - "version": "==1.29.165" + "version": "==1.29.161" + }, + "bottleneck": { + "hashes": [ + "sha256:03c43150f180d86a5633a6da788660d335983f6798fca306ba7f47ff27a1b7e7", + "sha256:1490348b3bbc0225523dc2c00c6bb3e66168c537d62797bd29783c0826c09838", 
+ "sha256:14b3334a39308fbb05dacd35ac100842aa9e9bc70afbdcebe43e46179d183fd0", + "sha256:1b4dac5d2a871b7bd296c2b92426daa27d5b07aa84ef2557db097d29135da4eb", + "sha256:2110af22aa8c2779faba8aa021d6b559df04449bdf21d510eacd7910934189fe", + "sha256:220b72405f77aebb0137b733b464c2526ded471e4289ac1e840bab8852759a55", + "sha256:23834d82177d6997f21fa63156550668cd07a9a6e5a1b66ea80f1a14ac6ffd07", + "sha256:28260197ab8a4a6b7adf810523147b1a3e85607f4e26a0f685eb9d155cfc75af", + "sha256:2861ff645d236f1a6f5c6d1ddb3db37d19af1d91057bdc4fd7b76299a15b3079", + "sha256:2bb79a2ac135567694f13339f0bebcee96aec09c596b324b61cd7fd5e306f49d", + "sha256:381cbd1e52338fcdf9ff01c962e6aa187b2d8b3b369d42e779b6d33ac61f8d35", + "sha256:44305c70c2a1539b0ae968e033f301ad868a6146b47e3cccd73fdfe3fc07c4ee", + "sha256:4a91e40bbb8452e77772614d882be2c34b3b514d9f15460f703293525a6e173d", + "sha256:520d7a83cd48b3f58e5df1a258acb547f8a5386a8c21ca9e1058d83a0d622fdf", + "sha256:59604949aea476f5075b965129eaa3c2d90891fd43b0dfaf2ad7621bb5db14a5", + "sha256:5d6bf45ed58d5e7414c0011ef2da75474fe597a51970df83596b0bcb79c14c5e", + "sha256:6136ce7dcf825c432a20b80ab1c460264a437d8430fff32536176147e0b6b832", + "sha256:6179791c0119aec3708ef74ddadab8d183e3742adb93a9028718e8696bdf572b", + "sha256:67347b0f01f32a232a6269c37afc1c079e08f6455fa12e91f4a1cd12eb0d11a5", + "sha256:6a36280ee33d9db799163f04e88b950261e590cc71d089f5e179b21680b5d491", + "sha256:817aa43a671ede696ea023d8f35839a391244662340cc95a0f46965dda8b35cf", + "sha256:834816c316ad184cae7ecb615b69876a42cd2cafb07ee66c57a9c1ccacb63339", + "sha256:8746f0f727997ce4c7457dc1fec4e4e3c0fdd8803514baa3d1c4ea6515ab04b2", + "sha256:889e6855b77345622b4ba927335d3118745d590492941f5f78554f157d259e92", + "sha256:90d5d188a0cca0b9655ff2904ee61e7f183079e97550be98c2541a2eec358a72", + "sha256:9903f017b9d6f2f69ce241b424ddad7265624f64dc6eafbe257d45661febf8bd", + "sha256:a704165552496cbcc8bcc5921bb679fd6fa66bb1e758888de091b1223231c9f0", + "sha256:b1339b9ad3ee217253f246cde5c3789eb527cf9dd31ff0a1f5a8bf7fc89eadad", + "sha256:b5f72b66ccc0272de46b67346cf8490737ba2adc6a302664f5326e7741b6d5ab", + "sha256:beb36df519b8709e7d357c0c9639b03b885ca6355bbf5e53752c685de51605b8", + "sha256:c2c92545e1bc8e859d8d137aefa3b24843bd374b17c9814dafa3bbcea9fc4ec0", + "sha256:c6097bf39723e76ff5bba160daab92ae599df212c859db8d46648548584d04a8", + "sha256:d53f1a72b12cfd76b56934c33bc0cb7c1a295f23a2d3ffba8c764514c9b5e0ff", + "sha256:d69907d8d679cb5091a3f479c46bf1076f149f6311ff3298bac5089b86a2fab1", + "sha256:de17e012694e6a987bb4eb050dd7f0cf939195a8e00cb23aa93ebee5fd5e64a8", + "sha256:e720ff24370324c84a82b1a18195274715c23181748b2b9e3dacad24198ca06f", + "sha256:ed209f8f3cb9954773764b0fa2510a7a9247ad245593187ac90bd0747771bc5c", + "sha256:eea333dbcadb780356c54f5c4fa7754f143573b57508fff43d5daf63298eb26a", + "sha256:f2749602200aaa0e12a0f3f936dd6d4035384ad10d3acf7ac4f418c501683397", + "sha256:f63e79bfa2f82a7432c8b147ed321d01ca7769bc17cc04644286a4ce58d30549", + "sha256:fbcdd01db9e27741fb16a02b720cf02389d4b0b99cefe3c834c7df88c2d7412d", + "sha256:ffb4e4edf7997069719b9269926cc00a2a12c6e015422d1ebc2f621c4541396a" + ], + "version": "==1.4.0" }, "brotli": { "hashes": [ @@ -406,20 +469,20 @@ }, "cachetools": { "hashes": [ - "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", - "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105" + "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474", + "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827" ], "markers": "python_version >= '3.7'", - "version": 
"==5.3.3" + "version": "==5.4.0" }, "certifi": { "hashes": [ - "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", - "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9" + "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", + "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90" ], "index": "pypi", "markers": "python_version >= '3.6'", - "version": "==2023.7.22" + "version": "==2024.7.4" }, "cffi": { "hashes": [ @@ -644,19 +707,11 @@ }, "cloudpickle": { "hashes": [ - "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7", - "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882" + "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f", + "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5" ], - "markers": "python_version >= '3.8'", - "version": "==3.0.0" - }, - "colorcet": { - "hashes": [ - "sha256:2921b3cd81a2288aaf2d63dbc0ce3c26dcd882e8c389cc505d6886bf7aa9a4eb", - "sha256:2a7d59cc8d0f7938eeedd08aad3152b5319b4ba3bcb7a612398cc17a384cb296" - ], - "markers": "python_version >= '3.7'", - "version": "==3.1.0" + "markers": "python_version >= '3.6'", + "version": "==2.2.1" }, "comm": { "hashes": [ @@ -797,48 +852,44 @@ "version": "==3.0.10" }, "dask": { - "hashes": [ - "sha256:32b34986519b7ddc0947c8ca63c2fc81b964e4c208dfb5cbf9f4f8aec92d152b", - "sha256:4f4c28ac406e81b8f21b5be4b31b21308808f3e0e7c7e2f4a914f16476d9941b" + "extras": [ + "array" ], - "markers": "python_version >= '3.8'", - "version": "==2023.5.0" - }, - "datashader": { "hashes": [ - "sha256:7899979b4c1adba6b4fd2e86caa3f5ef94c4e6ab234cbb7306ca6bfe243fc4df", - "sha256:c12ce9c71983a3f74ccba9d9416c270d1ed0a4e6c7bdcc6b63a2121ada0e00cd" + "sha256:81b80ee015b2e057b93bb2d1bf13a866136e762e2b24bf54b6b621e8b86b7708", + "sha256:d429d6b19e85fd1306ac37c188aaf99d03bbe69a6fe59d2b42882b2ac188686f" ], + "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==0.16.2" + "version": "==2024.6.2" }, "debugpy": { "hashes": [ - "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb", - "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146", - "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8", - "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242", - "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0", - "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741", - "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539", - "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23", - "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3", - "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39", - "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd", - "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9", - "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace", - "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42", - "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0", - "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7", - "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e", - "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234", - "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98", - 
"sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703", - "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42", - "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099" + "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3", + "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9", + "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859", + "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755", + "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca", + "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad", + "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6", + "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02", + "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d", + "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835", + "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3", + "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2", + "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326", + "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a", + "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00", + "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634", + "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e", + "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031", + "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210", + "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa", + "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1", + "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47" ], "markers": "python_version >= '3.8'", - "version": "==1.8.1" + "version": "==1.8.2" }, "decorator": { "hashes": [ @@ -863,6 +914,15 @@ ], "version": "==0.3.8" }, + "distributed": { + "hashes": [ + "sha256:0c1f8ccb1da71273ad8c53c598147dc37e60bef17142fd466cb72618a521880f", + "sha256:bb43b766ada860b163956607c80f99871d823c645e326c2b5e35f020351adc55" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==2024.6.2" + }, "entrypoints": { "hashes": [ "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4", @@ -881,11 +941,11 @@ }, "exceptiongroup": { "hashes": [ - "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad", - "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16" + "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", + "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" ], "markers": "python_version < '3.11'", - "version": "==1.2.1" + "version": "==1.2.2" }, "executing": { "hashes": [ @@ -905,18 +965,18 @@ }, "fastjsonschema": { "hashes": [ - "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0", - "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d" + "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23", + "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a" ], - "version": "==2.19.1" + "version": "==2.20.0" }, "filelock": { "hashes": [ - "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8", - "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac" + 
"sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb", + "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7" ], "markers": "python_version >= '3.8'", - "version": "==3.15.1" + "version": "==3.15.4" }, "fiona": { "hashes": [ @@ -970,51 +1030,51 @@ }, "fonttools": { "hashes": [ - "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d", - "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64", - "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2", - "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4", - "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6", - "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b", - "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f", - "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380", - "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e", - "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749", - "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20", - "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0", - "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4", - "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5", - "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206", - "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9", - "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac", - "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1", - "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce", - "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4", - "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12", - "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca", - "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d", - "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068", - "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796", - "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec", - "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea", - "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f", - "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005", - "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2", - "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06", - "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109", - "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002", - "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9", - "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a", - "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68", - "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6", - "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161", - "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd", - "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d", - "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee", - "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af" + 
"sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122", + "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397", + "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f", + "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d", + "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60", + "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169", + "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8", + "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31", + "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923", + "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2", + "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb", + "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab", + "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb", + "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a", + "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670", + "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8", + "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407", + "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671", + "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88", + "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f", + "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f", + "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0", + "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb", + "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2", + "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d", + "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c", + "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3", + "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719", + "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749", + "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4", + "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f", + "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02", + "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58", + "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1", + "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41", + "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4", + "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb", + "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb", + "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3", + "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d", + "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d", + "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2" ], "markers": "python_version >= '3.8'", - "version": "==4.53.0" + "version": "==4.53.1" }, "fqdn": { "hashes": [ @@ -1108,19 +1168,19 @@ }, "fsspec": { "hashes": [ - "sha256:58d7122eb8a1a46f7f13453187bfea4972d66bf01618d37366521b1998034cee", - "sha256:f579960a56e6d8038a9efc8f9c77279ec12e6299aa86b0769a7e9c46b94527c2" + 
"sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a", + "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af" ], "markers": "python_version >= '3.8'", - "version": "==2024.6.0" + "version": "==2023.6.0" }, "geocube": { "hashes": [ - "sha256:2fb9e193c67f47f47cdade85748f17cd69bf8ac1cf5194d683846022b8d5c4e4", - "sha256:908e13900535a8ba2d6598ed3e69924f8dc60620bde242d760655d5a2a8244f2" + "sha256:0b06a1401df539d559703053d380f0e10bb7e013653ce9acb5e164e36f0a4ab3", + "sha256:bf49d7b19c4d78740c095ab93f1f10f448f131338ccc74de497ec5b437273730" ], "markers": "python_version >= '3.10'", - "version": "==0.5.2" + "version": "==0.5.3" }, "geographiclib": { "hashes": [ @@ -1149,20 +1209,20 @@ }, "gval": { "hashes": [ - "sha256:d1a560f9e8b9b7727c618a56d7a04f3621ceb71477f18410c811a6dc0bd9b05d", - "sha256:d9f26aa88f164b77a47cb7d7eafc1641e48abe4152501657cb173c44dc84a8fd" + "sha256:5721270a41278888c043a3fc1c91c26998799f31edf151a6383afd8e2bd9de9a", + "sha256:9c9e1d9710e237a95cbb8385d7c41f21e0e64051fe526985373a2bc80bf5fa87" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==0.2.3" + "version": "==0.2.7" }, "identify": { "hashes": [ - "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa", - "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d" + "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf", + "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0" ], "markers": "python_version >= '3.8'", - "version": "==2.5.36" + "version": "==2.6.0" }, "idna": { "hashes": [ @@ -1174,11 +1234,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", - "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2" + "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f", + "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812" ], - "markers": "python_version >= '3.8'", - "version": "==7.1.0" + "markers": "python_version < '3.12'", + "version": "==8.0.0" }, "inflate64": { "hashes": [ @@ -1239,21 +1299,13 @@ "markers": "python_version >= '3.7'", "version": "==1.0.0" }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, "ipykernel": { "hashes": [ - "sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da", - "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c" + "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", + "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215" ], "markers": "python_version >= '3.8'", - "version": "==6.29.4" + "version": "==6.29.5" }, "ipympl": { "hashes": [ @@ -1265,11 +1317,11 @@ }, "ipython": { "hashes": [ - "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab", - "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716" + "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c", + "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff" ], "markers": "python_version >= '3.10'", - "version": "==8.25.0" + "version": "==8.26.0" }, "ipython-genutils": { "hashes": [ @@ -1354,11 +1406,11 @@ "format-nongpl" ], "hashes": [ - "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7", - 
"sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802" + "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", + "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566" ], "markers": "python_version >= '3.8'", - "version": "==4.22.0" + "version": "==4.23.0" }, "jsonschema-specifications": { "hashes": [ @@ -1411,11 +1463,11 @@ }, "jupyter-server": { "hashes": [ - "sha256:12558d158ec7a0653bf96cc272bc7ad79e0127d503b982ed144399346694f726", - "sha256:16f7177c3a4ea8fe37784e2d31271981a812f0b2874af17339031dc3510cc2a5" + "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd", + "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b" ], "markers": "python_version >= '3.8'", - "version": "==2.14.1" + "version": "==2.14.2" }, "jupyter-server-fileid": { "hashes": [ @@ -1468,11 +1520,11 @@ }, "jupyterlab-server": { "hashes": [ - "sha256:15cbb349dc45e954e09bacf81b9f9bcb10815ff660fb2034ecd7417db3a7ea27", - "sha256:54aa2d64fd86383b5438d9f0c032f043c4d8c0264b8af9f60bd061157466ea43" + "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4", + "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4" ], "markers": "python_version >= '3.8'", - "version": "==2.27.2" + "version": "==2.27.3" }, "jupyterlab-widgets": { "hashes": [ @@ -1594,37 +1646,30 @@ }, "llvmlite": { "hashes": [ - "sha256:03aee0ccd81735696474dc4f8b6be60774892a2929d6c05d093d17392c237f32", - "sha256:1578f5000fdce513712e99543c50e93758a954297575610f48cb1fd71b27c08a", - "sha256:16f56eb1eec3cda3a5c526bc3f63594fc24e0c8d219375afeb336f289764c6c7", - "sha256:1ec3d70b3e507515936e475d9811305f52d049281eaa6c8273448a61c9b5b7e2", - "sha256:22d36591cd5d02038912321d9ab8e4668e53ae2211da5523f454e992b5e13c36", - "sha256:3803f11ad5f6f6c3d2b545a303d68d9fabb1d50e06a8d6418e6fcd2d0df00959", - "sha256:39dc2160aed36e989610fc403487f11b8764b6650017ff367e45384dff88ffbf", - "sha256:3fc14e757bc07a919221f0cbaacb512704ce5774d7fcada793f1996d6bc75f2a", - "sha256:4c6ebace910410daf0bebda09c1859504fc2f33d122e9a971c4c349c89cca630", - "sha256:50aea09a2b933dab7c9df92361b1844ad3145bfb8dd2deb9cd8b8917d59306fb", - "sha256:60f8dd1e76f47b3dbdee4b38d9189f3e020d22a173c00f930b52131001d801f9", - "sha256:62c0ea22e0b9dffb020601bb65cb11dd967a095a488be73f07d8867f4e327ca5", - "sha256:6546bed4e02a1c3d53a22a0bced254b3b6894693318b16c16c8e43e29d6befb6", - "sha256:6717c7a6e93c9d2c3d07c07113ec80ae24af45cde536b34363d4bcd9188091d9", - "sha256:7ebf1eb9badc2a397d4f6a6c8717447c81ac011db00064a00408bc83c923c0e4", - "sha256:9ffc84ade195abd4abcf0bd3b827b9140ae9ef90999429b9ea84d5df69c9058c", - "sha256:a3f331a323d0f0ada6b10d60182ef06c20a2f01be21699999d204c5750ffd0b4", - "sha256:b1a0bbdb274fb683f993198775b957d29a6f07b45d184c571ef2a721ce4388cf", - "sha256:b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572", - "sha256:c0f158e4708dda6367d21cf15afc58de4ebce979c7a1aa2f6b977aae737e2a54", - "sha256:d0bfd18c324549c0fec2c5dc610fd024689de6f27c6cc67e4e24a07541d6e49b", - "sha256:ddab526c5a2c4ccb8c9ec4821fcea7606933dc53f510e2a6eebb45a418d3488a", - "sha256:e172c73fccf7d6db4bd6f7de963dedded900d1a5c6778733241d878ba613980e", - "sha256:e2c00ff204afa721b0bb9835b5bf1ba7fba210eefcec5552a9e05a63219ba0dc", - "sha256:e31f4b799d530255aaf0566e3da2df5bfc35d3cd9d6d5a3dcc251663656c27b1", - "sha256:e4f212c018db951da3e1dc25c2651abc688221934739721f2dad5ff1dd5f90e7", - "sha256:fa9b26939ae553bf30a9f5c4c754db0fb2d2677327f2511e674aa2f5df941789", - 
"sha256:fb62fc7016b592435d3e3a8f680e3ea8897c3c9e62e6e6cc58011e7a4801439e" + "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed", + "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8", + "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7", + "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98", + "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4", + "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a", + "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc", + "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a", + "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9", + "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead", + "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749", + "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c", + "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761", + "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5", + "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867", + "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2", + "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91", + "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844", + "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57", + "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f", + "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7" ], - "markers": "python_version >= '3.7'", - "version": "==0.39.1" + "markers": "python_version >= '3.9'", + "version": "==0.43.0" }, "lmoments3": { "hashes": [ @@ -1787,6 +1832,15 @@ "markers": "python_version >= '3.7'", "version": "==3.0.2" }, + "monaco": { + "hashes": [ + "sha256:23c0d4f1603bbc6f91af26fcb176de6c0cab531f137d789aaaf467eddd8ac2a4", + "sha256:913dd70b9565e0c337c46d484785d2047e276e64189ab5ff06c717561ad3b01d" + ], + "index": "pypi", + "markers": "python_version < '3.13' and python_full_version >= '3.9.0'", + "version": "==0.13.1" + }, "morecantile": { "hashes": [ "sha256:610f1dcc3ae0a99f0a0e6c05e18508d0e9e26b53279e06a65150939e2f54963b", @@ -1955,18 +2009,11 @@ }, "multimethod": { "hashes": [ - "sha256:7f2a4863967142e6db68632fef9cd79053c09670ba0c5f113301e245140bba5c", - "sha256:cb338f09395c0ee87d36c7691cdd794d13d8864358082cf1205f812edd5ce05a" + "sha256:8db8ef2a8d2a247e3570cc23317680892fdf903d84c8c1053667c8e8f7671a67", + "sha256:fd0c473c43558908d97cc06e4d68e8f69202f167db46f7b4e4058893e7dbdf60" ], "markers": "python_version >= '3.9'", - "version": "==1.11.2" - }, - "multipledispatch": { - "hashes": [ - "sha256:0c53cd8b077546da4e48869f49b13164bebafd0c2a5afceb6bb6a316e7fb46e4", - "sha256:5c839915465c68206c3e9c473357908216c28383b425361e5d144594bf85a7e0" - ], - "version": "==1.0.0" + "version": "==1.12" }, "multivolumefile": { "hashes": [ @@ -2109,135 +2156,128 @@ }, "numba": { "hashes": [ - "sha256:0240f9026b015e336069329839208ebd70ec34ae5bfbf402e4fcc8e06197528e", - "sha256:03634579d10a6129181129de293dd6b5eaabee86881369d24d63f8fe352dd6cb", - "sha256:03fe94cd31e96185cce2fae005334a8cc712fc2ba7756e52dff8c9400718173f", - "sha256:0611e6d3eebe4cb903f1a836ffdb2bda8d18482bcd0a0dcc56e79e2aa3fefef5", - "sha256:0da583c532cd72feefd8e551435747e0e0fbb3c0530357e6845fcc11e38d6aea", - 
"sha256:14dbbabf6ffcd96ee2ac827389afa59a70ffa9f089576500434c34abf9b054a4", - "sha256:32d9fef412c81483d7efe0ceb6cf4d3310fde8b624a9cecca00f790573ac96ee", - "sha256:3a993349b90569518739009d8f4b523dfedd7e0049e6838c0e17435c3e70dcc4", - "sha256:3cb1a07a082a61df80a468f232e452d818f5ae254b40c26390054e4e868556e0", - "sha256:42f9e1be942b215df7e6cc9948cf9c15bb8170acc8286c063a9e57994ef82fd1", - "sha256:4373da9757049db7c90591e9ec55a2e97b2b36ba7ae3bf9c956a513374077470", - "sha256:4e08e203b163ace08bad500b0c16f6092b1eb34fd1fce4feaf31a67a3a5ecf3b", - "sha256:553da2ce74e8862e18a72a209ed3b6d2924403bdd0fb341fa891c6455545ba7c", - "sha256:720886b852a2d62619ae3900fe71f1852c62db4f287d0c275a60219e1643fc04", - "sha256:85dbaed7a05ff96492b69a8900c5ba605551afb9b27774f7f10511095451137c", - "sha256:8a95ca9cc77ea4571081f6594e08bd272b66060634b8324e99cd1843020364f9", - "sha256:91f021145a8081f881996818474ef737800bcc613ffb1e618a655725a0f9e246", - "sha256:9f62672145f8669ec08762895fe85f4cf0ead08ce3164667f2b94b2f62ab23c3", - "sha256:a12ef323c0f2101529d455cfde7f4135eaa147bad17afe10b48634f796d96abd", - "sha256:c602d015478b7958408d788ba00a50272649c5186ea8baa6cf71d4a1c761bba1", - "sha256:c75e8a5f810ce80a0cfad6e74ee94f9fde9b40c81312949bf356b7304ef20740", - "sha256:d0ae9270a7a5cc0ede63cd234b4ff1ce166c7a749b91dbbf45e0000c56d3eade", - "sha256:d69ad934e13c15684e7887100a8f5f0f61d7a8e57e0fd29d9993210089a5b531", - "sha256:dbcc847bac2d225265d054993a7f910fda66e73d6662fe7156452cac0325b073", - "sha256:e64d338b504c9394a4a34942df4627e1e6cb07396ee3b49fe7b8d6420aa5104f", - "sha256:f4cfc3a19d1e26448032049c79fc60331b104f694cf570a9e94f4e2c9d0932bb", - "sha256:fbfb45e7b297749029cb28694abf437a78695a100e7c2033983d69f0ba2698d4", - "sha256:fcdf84ba3ed8124eb7234adfbb8792f311991cbf8aed1cad4b1b1a7ee08380c1" + "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74", + "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b", + "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d", + "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781", + "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b", + "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198", + "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab", + "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c", + "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b", + "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8", + "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651", + "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16", + "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703", + "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e", + "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449", + "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8", + "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25", + "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2", + "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404", + "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347", + "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e" ], "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==0.56.4" + "markers": "python_version >= '3.9'", + "version": "==0.60.0" }, "numcodecs": { 
"hashes": [ - "sha256:05d91a433733e7eef268d7e80ec226a0232da244289614a8f3826901aec1098e", - "sha256:0e79bf9d1d37199ac00a60ff3adb64757523291d19d03116832e600cac391c51", - "sha256:135b2d47563f7b9dc5ee6ce3d1b81b0f1397f69309e909f1a35bb0f7c553d45e", - "sha256:21d8267bd4313f4d16f5b6287731d4c8ebdab236038f29ad1b0e93c9b2ca64ee", - "sha256:29dfb195f835a55c4d490fb097aac8c1bcb96c54cf1b037d9218492c95e9d8c5", - "sha256:2f1ba2f4af3fd3ba65b1bcffb717fe65efe101a50a91c368f79f3101dbb1e243", - "sha256:2f84df6b8693206365a5b37c005bfa9d1be486122bde683a7b6446af4b75d862", - "sha256:2fbb12a6a1abe95926f25c65e283762d63a9bf9e43c0de2c6a1a798347dfcb40", - "sha256:760627780a8b6afdb7f942f2a0ddaf4e31d3d7eea1d8498cf0fd3204a33c4618", - "sha256:82d7107f80f9307235cb7e74719292d101c7ea1e393fe628817f0d635b7384f5", - "sha256:941b7446b68cf79f089bcfe92edaa3b154533dcbcd82474f994b28f2eedb1c60", - "sha256:a191a8e347ecd016e5c357f2bf41fbcb026f6ffe78fff50c77ab12e96701d155", - "sha256:abff3554a6892a89aacf7b642a044e4535499edf07aeae2f2e6e8fc08c9ba07f", - "sha256:c17687b1fd1fef68af616bc83f896035d24e40e04e91e7e6dae56379eb59fe33", - "sha256:c258bd1d3dfa75a9b708540d23b2da43d63607f9df76dfa0309a7597d1de3b73", - "sha256:caf1a1e6678aab9c1e29d2109b299f7a467bd4d4c34235b1f0e082167846b88f", - "sha256:d37f628fe92b3699e65831d5733feca74d2e33b50ef29118ffd41c13c677210e", - "sha256:e04649ea504aff858dbe294631f098fbfd671baf58bfc04fc48d746554c05d67", - "sha256:eeaf42768910f1c6eebf6c1bb00160728e62c9343df9e2e315dc9fe12e3f6071", - "sha256:ef964d4860d3e6b38df0633caf3e51dc850a6293fd8e93240473642681d95136", - "sha256:f2207871868b2464dc11c513965fd99b958a9d7cde2629be7b2dc84fdaab013b" + "sha256:17bc4b568214582f4c623700592f633f3afd920848630049c584fa1e535253ad", + "sha256:208cab0f4d9cf4409e9c4a4c935e165833786614822c81dee9d865af372da9df", + "sha256:56e49f68ce6aeba29f144992524c8897d94f846d02bbcc820dd29d7c5c2a073e", + "sha256:820be89729583c91601a6b35c052008cdd2665b25bfedb91b367cc155fb34ba0", + "sha256:a68368d3ce625ec76fcacd84785f6110d30a232909d5c6093a7aa25628880477", + "sha256:ac4dd5556fb126271e93bd1a02266e21b01d3617db448d70d00eec8e034506b4", + "sha256:ba4fac7036ea5a078c7afe1d4dffeb9685080d42f19c9c16b12dad866703aa2e", + "sha256:d67a859dd8a7f026829e91cb1799c26720cc9d29ee4ae0060cc7a581670abc06", + "sha256:e7d3b9693df52eeaf978d2a56971d01cf9b4e284ae769ec764807f2087cce51d", + "sha256:eed420a9c62d0a569aa94a387f93045f068ad3e7bbd787c6ce70bc5fefbaa7d9", + "sha256:f208a1b8b5e66c767ed043812ca74d9045e09b7b2e085d064a585c30b9efc8e7", + "sha256:f3cf462d2357998d7f6baaa0427657b0eeda3eb79fba2b146d2d04542912a513", + "sha256:f5904216811f2e9d312c23ffaad3b3d4c7442a3583d3a8bf81ca8319e9f5deb5" ], - "markers": "python_version >= '3.8'", - "version": "==0.12.1" + "markers": "python_version >= '3.10'", + "version": "==0.13.0" }, "numexpr": { "hashes": [ - "sha256:03d0ba492e484a5a1aeb24b300c4213ed168f2c246177be5733abb4e18cbb043", - "sha256:04e8620e7e676504201d4082e7b3ee2d9b561d1cb9470b47a6104e10c1e2870e", - "sha256:05278bad96b5846d712eba58b44e5cec743bdb3e19ca624916c921d049fdbcf6", - "sha256:10789450032357afaeda4ac4d06da9542d1535c13151e8d32b49ae1a488d1358", - "sha256:1af6dc6b3bd2e11a802337b352bf58f30df0b70be16c4f863b70a3af3a8ef95e", - "sha256:1d8eb88b0ae3d3c609d732a17e71096779b2bf47b3a084320ffa93d9f9132786", - "sha256:3c66dc0188358cdcc9465b6ee54fd5eef2e83ac64b1d4ba9117c41df59bf6fca", - "sha256:416e0e9f0fc4cced67767585e44cb6b301728bdb9edbb7c534a853222ec62cac", - "sha256:4f0b045e1831953a47cc9fabae76a6794c69cbb60921751a5cf2d555034c55bf", - 
"sha256:4feafc65ea3044b8bf8f305b757a928e59167a310630c22b97a57dff07a56490", - "sha256:56d0d96b130f7cd4d78d0017030d6a0e9d9fc2a717ac51d4cf4860b39637e86a", - "sha256:629b66cc1b750671e7fb396506b3f9410612e5bd8bc1dd55b5a0a0041d839f95", - "sha256:6b5f8242c075477156d26b3a6b8e0cd0a06d4c8eb68d907bde56dd3c9c683e92", - "sha256:745b46a1fb76920a3eebfaf26e50bc94a9c13b5aee34b256ab4b2d792dbaa9ca", - "sha256:748e8d4cde22d9a5603165293fb293a4de1a4623513299416c64fdab557118c2", - "sha256:78e0a8bc4417c3dedcbae3c473505b69080535246edc977c7dccf3ec8454a685", - "sha256:83f1e7a7f7ee741b8dcd20c56c3f862a3a3ec26fa8b9fcadb7dcd819876d2f35", - "sha256:937d36c6d3cf15601f26f84f0f706649f976491e9e0892d16cd7c876d77fa7dc", - "sha256:96a64d0dd8f8e694da3f8582d73d7da8446ff375f6dd239b546010efea371ac3", - "sha256:a602692cd52ce923ce8a0a90fb1d6cf186ebe8706eed83eee0de685e634b9aa9", - "sha256:a6cdf9e64c5b3dbb61729edb505ea75ee212fa02b85c5b1d851331381ae3b0e1", - "sha256:b276e2ba3e87ace9a30fd49078ad5dcdc6a1674d030b1ec132599c55465c0346", - "sha256:c7517b774d309b1f0896c89bdd1ddd33c4418a92ecfbe5e1df3ac698698f6fcf", - "sha256:c89e930752639df040539160326d8f99a84159bbea41943ab8e960591edaaef0", - "sha256:cb5e12787101f1216f2cdabedc3417748f2e1f472442e16bbfabf0bab2336300", - "sha256:d47bb567e330ebe86781864219a36cbccb3a47aec893bd509f0139c6b23e8104", - "sha256:dc3506c30c03b082da2cadef43747d474e5170c1f58a6dcdf882b3dc88b1e849", - "sha256:e3a973265591b0a875fd1151c4549e468959c7192821aac0bb86937694a08efa", - "sha256:efa63ecdc9fcaf582045639ddcf56e9bdc1f4d9a01729be528f62df4db86c9d6" + "sha256:00204e5853713b5eba5f3d0bc586a5d8d07f76011b597c8b4087592cc2ec2928", + "sha256:22cc65e9121aeb3187a2b50827715b2b087ea70e8ab21416ea52662322087b43", + "sha256:300e577b3c006dd7a8270f1bb2e8a00ee15bf235b1650fe2a6febec2954bc2c3", + "sha256:368a1972c3186355160f6ee330a7eea146d8443da75a38a30083289ae251ef5a", + "sha256:37598cca41f8f50dc889b0b72be1616a288758c16ab7d48c9ac8719e1a39d835", + "sha256:44f6d12a8c44be90199bbb10d3abf467f88951f48a3d1fbbd3c219d121f39c9d", + "sha256:45f598182b4f5c153222e47d5163c3bee8d5ebcaee7e56dd2a5898d4d97e4473", + "sha256:4f0985bd1c493b23b5aad7d81fa174798f3812efb78d14844194834c9fee38b8", + "sha256:552c8d4b2e3b87cdb2abb40a781b9a61a9090a9f66ac7357fc5a0b93aff76be3", + "sha256:56648a04679063175681195670ad53e5c8ca19668166ed13875199b5600089c7", + "sha256:5a4db4456e0779d5e024220b7b6a7477ac900679bfa74836b06fa526aaed4e3c", + "sha256:6a50370bea77ba94c3734a44781c716751354c6bfda2d369af3aed3d67d42871", + "sha256:78b14c19c403df7498954468385768c86b0d2c52ad03dffb74e45d44ae5a9c77", + "sha256:82bf04a1495ac475de4ab49fbe0a3a2710ed3fd1a00bc03847316b5d7602402d", + "sha256:82fc95c301b15ff4823f98989ee363a2d5555d16a7cfd3710e98ddee726eaaaa", + "sha256:926dd426c68f1d927412a2ad843831c1eb9a95871e7bb0bd8b20d547c12238d2", + "sha256:9bba99d354a65f1a008ab8b87f07d84404c668e66bab624df5b6b5373403cf81", + "sha256:a3c0b0bf165b2d886eb981afa4e77873ca076f5d51c491c4d7b8fc10f17c876f", + "sha256:ac23a72eff10f928f23b147bdeb0f1b774e862abe332fc9bf4837e9f1bc0bbf9", + "sha256:b28eaf45f1cc1048aad9e90e3a8ada1aef58c5f8155a85267dc781b37998c046", + "sha256:b2efa499f460124538a5b4f1bf2e77b28eb443ee244cc5573ed0f6a069ebc635", + "sha256:bbd35f17f6efc00ebd4a480192af1ee30996094a0d5343b131b0e90e61e8b554", + "sha256:ca8ae46481d0b0689ca0d00a8670bc464ce375e349599fe674a6d4957e7b7eb6", + "sha256:cbf79fef834f88607f977ab9867061dcd9b40ccb08bb28547c6dc6c73e560895", + "sha256:ce04ae6efe2a9d0be1a0e114115c3ae70c68b8b8fbc615c5c55c15704b01e6a4", + "sha256:fa4009d84a8e6e21790e718a80a22d57fe7f215283576ef2adc4183f7247f3c7", + 
"sha256:fb704620657a1c99d64933e8a982148d8bfb2b738a1943e107a2bfdee887ce56", + "sha256:fcbf013bb8494e8ef1d11fa3457827c1571c6a3153982d709e5d17594999d4dd", + "sha256:fecdf4bf3c1250e56583db0a4a80382a259ba4c2e1efa13e04ed43f0938071f5" ], "markers": "python_version >= '3.9'", - "version": "==2.10.0" + "version": "==2.10.1" }, "numpy": { "hashes": [ - "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d", - "sha256:06005a2ef6014e9956c09ba07654f9837d9e26696a0470e42beedadb78c11b07", - "sha256:09b7847f7e83ca37c6e627682f145856de331049013853f344f37b0c9690e3df", - "sha256:0aaee12d8883552fadfc41e96b4c82ee7d794949e2a7c3b3a7201e968c7ecab9", - "sha256:0cbe9848fad08baf71de1a39e12d1b6310f1d5b2d0ea4de051058e6e1076852d", - "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a", - "sha256:33161613d2269025873025b33e879825ec7b1d831317e68f4f2f0f84ed14c719", - "sha256:5039f55555e1eab31124a5768898c9e22c25a65c1e0037f4d7c495a45778c9f2", - "sha256:522e26bbf6377e4d76403826ed689c295b0b238f46c28a7251ab94716da0b280", - "sha256:56e454c7833e94ec9769fa0f86e6ff8e42ee38ce0ce1fa4cbb747ea7e06d56aa", - "sha256:58f545efd1108e647604a1b5aa809591ccd2540f468a880bedb97247e72db387", - "sha256:5e05b1c973a9f858c74367553e236f287e749465f773328c8ef31abe18f691e1", - "sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43", - "sha256:8969bfd28e85c81f3f94eb4a66bc2cf1dbdc5c18efc320af34bffc54d6b1e38f", - "sha256:92c8c1e89a1f5028a4c6d9e3ccbe311b6ba53694811269b992c0b224269e2398", - "sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63", - "sha256:a7ac231a08bb37f852849bbb387a20a57574a97cfc7b6cabb488a4fc8be176de", - "sha256:abdde9f795cf292fb9651ed48185503a2ff29be87770c3b8e2a14b0cd7aa16f8", - "sha256:af1da88f6bc3d2338ebbf0e22fe487821ea4d8e89053e25fa59d1d79786e7481", - "sha256:b2a9ab7c279c91974f756c84c365a669a887efa287365a8e2c418f8b3ba73fb0", - "sha256:bf837dc63ba5c06dc8797c398db1e223a466c7ece27a1f7b5232ba3466aafe3d", - "sha256:ca51fcfcc5f9354c45f400059e88bc09215fb71a48d3768fb80e357f3b457e1e", - "sha256:ce571367b6dfe60af04e04a1834ca2dc5f46004ac1cc756fb95319f64c095a96", - "sha256:d208a0f8729f3fb790ed18a003f3a57895b989b40ea4dce4717e9cf4af62c6bb", - "sha256:dbee87b469018961d1ad79b1a5d50c0ae850000b639bcb1b694e9981083243b6", - "sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d", - "sha256:f063b69b090c9d918f9df0a12116029e274daf0181df392839661c4c7ec9018a", - "sha256:f9a909a8bae284d46bbfdefbdd4a262ba19d3bc9921b1e76126b1d21c3c34135" + "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", + "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", + "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", + "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", + "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", + "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", + "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea", + "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c", + "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", + "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", + "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be", + "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", + "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", + 
"sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", + "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", + "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd", + "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c", + "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", + "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", + "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c", + "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", + "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", + "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", + "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6", + "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", + "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", + "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30", + "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", + "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", + "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", + "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", + "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", + "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764", + "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", + "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", + "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==1.23.5" + "markers": "python_version >= '3.9'", + "version": "==1.26.4" }, "numpy-groupies": { "hashes": [ @@ -2337,14 +2377,6 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.5.1" }, - "param": { - "hashes": [ - "sha256:a7b30b08b547e2b78b02aeba6ed34e3c6a638f8e4824a76a96ffa2d7cf57e71f", - "sha256:f31d3745d227347d29b5868c4e4e3077df07463889b91d3bb28e634fde211e1c" - ], - "markers": "python_version >= '3.8'", - "version": "==2.1.0" - }, "parso": { "hashes": [ "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", @@ -2461,14 +2493,6 @@ "markers": "python_version >= '3.8'", "version": "==4.2.2" }, - "pluggy": { - "hashes": [ - "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", - "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" - ], - "markers": "python_version >= '3.8'", - "version": "==1.5.0" - }, "pre-commit": { "hashes": [ "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb", @@ -2494,6 +2518,13 @@ "markers": "python_full_version >= '3.7.0'", "version": "==3.0.47" }, + "properscoring": { + "hashes": [ + "sha256:b0cc4963cc218b728d6c5f77b3259c8f835ae00e32e82678cdf6936049b93961", + "sha256:f84d5b06c13549d0171ce52ad7b45c6f5726ac44b733d24af5c60654cbb821dc" + ], + "version": "==0.1" + }, "psutil": { "hashes": [ "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d", @@ -2513,7 +2544,7 @@ "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4", "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8" ], - "markers": "sys_platform != 'cygwin'", + "markers": "python_version >= '2.7' and 
python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", "version": "==5.9.8" }, "psycopg2-binary": { @@ -2760,55 +2791,54 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.20.0" }, - "pyct": { - "hashes": [ - "sha256:a4038a8885059ab8cac6f946ea30e0b5e6bdbe0b92b6723f06737035f9d65e8c", - "sha256:dd9f4ac5cbd8e37c352c04036062d3c5f67efec76d404761ef16b0cbf26aa6a0" - ], - "markers": "python_version >= '3.7'", - "version": "==0.5.0" - }, "pydantic": { "hashes": [ - "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6", - "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5", - "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef", - "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f", - "sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a", - "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac", - "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437", - "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71", - "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6", - "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b", - "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887", - "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f", - "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a", - "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48", - "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312", - "sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde", - "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da", - "sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af", - "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7", - "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d", - "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5", - "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183", - "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a", - "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450", - "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec", - "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96", - "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a", - "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5", - "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2", - "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028", - "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252", - "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd", - "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e", - "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030", - "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d", - "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d" + "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f", + "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc", + "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b", + 
"sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b", + "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b", + "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e", + "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3", + "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7", + "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227", + "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f", + "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6", + "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab", + "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad", + "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076", + "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681", + "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54", + "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb", + "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7", + "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe", + "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b", + "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab", + "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d", + "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0", + "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75", + "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741", + "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63", + "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd", + "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33", + "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815", + "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7", + "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a", + "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655", + "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773", + "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c", + "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7", + "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3", + "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768", + "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d", + "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688", + "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f", + "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e", + "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991", + "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a" ], "markers": "python_version >= '3.7'", - "version": "==1.10.10" + "version": "==1.10.17" }, "pyflakes": { "hashes": [ @@ -3001,14 +3031,24 @@ "markers": "python_version >= '3.8'", "version": "==3.5.0" }, - "pytest": { + "pystac": { + "extras": [ + "validation" + ], "hashes": [ - "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d", - "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201" + 
"sha256:4617fe5315a79785f79b616b8ac248ba3d4d561457c8300b34573309715808cd", + "sha256:a7c31b3dacc44dfc955d9da8c7351c7b5b99100254b36301a1e312709b51bf2f" ], - "index": "pypi", - "markers": "python_version >= '3.7'", - "version": "==7.3.0" + "markers": "python_version >= '3.9'", + "version": "==1.10.1" + }, + "pystac-client": { + "hashes": [ + "sha256:4b0ed0f7177dfc6e394aeb3ecf1236364f315b1d38c107afbcbbef17c2f7db8b", + "sha256:b07c21f0bfbe7ea19cd23e535406ee08ee604b8ff8d9afcee666c0b1fe017dc4" + ], + "markers": "python_version >= '3.8'", + "version": "==0.7.5" }, "python-dateutil": { "hashes": [ @@ -3399,116 +3439,116 @@ }, "rioxarray": { "hashes": [ - "sha256:433b169cd10346ed5fe0123e61fafca4dcaf5ce1b6e2f6cc1a9c0e0bf7d7c1d4", - "sha256:849979c3542cefb5ac452af474a0b50ff08b7435c6db1ad615ddb65b1da4bbf3" + "sha256:a98ea9306739f119b63ffc2245f5d7d23ca1638b99c50ca282d932901f9272e8", + "sha256:b0f4a8917bde79c15b1507296bd256de1a33923867f5e2e3078bda2b6ce3673f" ], "markers": "python_version >= '3.10'", - "version": "==0.15.5" + "version": "==0.16.0" }, "rpds-py": { "hashes": [ - "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee", - "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc", - "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc", - "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944", - "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20", - "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7", - "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4", - "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6", - "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6", - "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93", - "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633", - "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0", - "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360", - "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8", - "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139", - "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7", - "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a", - "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9", - "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26", - "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724", - "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72", - "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b", - "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09", - "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100", - "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3", - "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261", - "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3", - "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9", - "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b", - "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3", - "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de", - "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d", - 
"sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e", - "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8", - "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff", - "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5", - "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c", - "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e", - "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e", - "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4", - "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8", - "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922", - "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338", - "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d", - "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8", - "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2", - "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72", - "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80", - "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644", - "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae", - "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163", - "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104", - "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d", - "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60", - "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a", - "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d", - "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07", - "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49", - "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10", - "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f", - "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2", - "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8", - "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7", - "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88", - "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65", - "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0", - "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909", - "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8", - "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c", - "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184", - "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397", - "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a", - "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346", - "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590", - "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333", - "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb", - "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74", - "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e", - 
"sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d", - "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa", - "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f", - "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53", - "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1", - "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac", - "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0", - "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd", - "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611", - "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f", - "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c", - "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5", - "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab", - "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc", - "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43", - "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da", - "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac", - "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843", - "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e", - "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89", - "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64" + "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834", + "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4", + "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714", + "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d", + "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22", + "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34", + "sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff", + "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb", + "sha256:1dd46f309e953927dd018567d6a9e2fb84783963650171f6c5fe7e5c41fd5666", + "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b", + "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b", + "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e", + "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8", + "sha256:34a01a4490e170376cd79258b7f755fa13b1a6c3667e872c8e35051ae857a92b", + "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582", + "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34", + "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b", + "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581", + "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521", + "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8", + "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc", + "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc", + "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9", + "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c", + "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68", + 
"sha256:52e466bea6f8f3a44b1234570244b1cff45150f59a4acae3fcc5fd700c2993ca", + "sha256:535d4b52524a961d220875688159277f0e9eeeda0ac45e766092bfb54437543f", + "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb", + "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed", + "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b", + "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600", + "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac", + "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5", + "sha256:6b31f059878eb1f5da8b2fd82480cc18bed8dcd7fb8fe68370e2e6285fa86da6", + "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81", + "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9", + "sha256:6fe87efd7f47266dfc42fe76dae89060038f1d9cb911f89ae7e5084148d1cc08", + "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07", + "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec", + "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526", + "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766", + "sha256:7c98298a15d6b90c8f6e3caa6457f4f022423caa5fa1a1ca7a5e9e512bdb77a4", + "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67", + "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c", + "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a", + "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479", + "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be", + "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213", + "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa", + "sha256:8b1a94b8afc154fbe36978a511a1f155f9bd97664e4f1f7a374d72e180ceb0ae", + "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf", + "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955", + "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952", + "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f", + "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210", + "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1", + "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd", + "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b", + "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c", + "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed", + "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55", + "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5", + "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b", + "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e", + "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b", + "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a", + "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c", + "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378", + "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be", + "sha256:b0da31853ab6e58a11db3205729133ce0df26e6804e93079dee095be3d681dc1", + "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0", + 
"sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a", + "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2", + "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633", + "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d", + "sha256:bcf426a8c38eb57f7bf28932e68425ba86def6e756a5b8cb4731d8e62e4e0223", + "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa", + "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533", + "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a", + "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc", + "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248", + "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05", + "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb", + "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d", + "sha256:dab8d921b55a28287733263c0e4c7db11b3ee22aee158a4de09f13c93283c62d", + "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388", + "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9", + "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d", + "sha256:e21cc693045fda7f745c790cb687958161ce172ffe3c5719ca1764e752237d16", + "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2", + "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709", + "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0", + "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336", + "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c", + "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179", + "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0", + "sha256:f68eea5df6347d3f1378ce992d86b2af16ad7ff4dcb4a19ccdc23dea901b87fb", + "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1", + "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4" ], "markers": "python_version >= '3.8'", - "version": "==0.18.1" + "version": "==0.19.0" }, "rtree": { "hashes": [ @@ -3562,6 +3602,14 @@ "markers": "python_version >= '3.7'", "version": "==1.0.1" }, + "s3fs": { + "hashes": [ + "sha256:63fd8ddf05eb722de784b7b503196107f2a518061298cf005a8a4715b4d49117", + "sha256:d1a0a423d0d2e17fb2a193d9531935dc3f45ba742693448a461b6b34f6a92a24" + ], + "markers": "python_version >= '3.8'", + "version": "==2023.6.0" + }, "s3transfer": { "hashes": [ "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084", @@ -3570,33 +3618,64 @@ "markers": "python_version >= '3.7'", "version": "==0.6.2" }, + "scikit-learn": { + "hashes": [ + "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1", + "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414", + "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1", + "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2", + "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21", + "sha256:1bd8d3a19d4bd6dc5a7d4f358c8c3a60934dc058f363c34c0ac1e9e12a31421d", + "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac", + "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915", + "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395", + 
"sha256:5f57428de0c900a98389c4a433d4a3cf89de979b3aa24d1c1d251802aa15e44d", + "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf", + "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745", + "sha256:7b073a27797a283187a4ef4ee149959defc350b46cbf63a84d8514fe16b69855", + "sha256:88e0672c7ac21eb149d409c74cc29f1d611d5158175846e7a9c2427bd12b3956", + "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4", + "sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b", + "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b", + "sha256:b59e3e62d2be870e5c74af4e793293753565c7383ae82943b83383fdcf5cc5c1", + "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe", + "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74", + "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7" + ], + "markers": "python_version >= '3.9'", + "version": "==1.5.1" + }, "scipy": { "hashes": [ - "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415", - "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f", - "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd", - "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f", - "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d", - "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601", - "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5", - "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88", - "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f", - "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e", - "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2", - "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353", - "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35", - "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6", - "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea", - "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35", - "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1", - "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9", - "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5", - "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019", - "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1" + "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0", + "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7", + "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d", + "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0", + "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20", + "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc", + "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0", + "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159", + "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6", + "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1", + "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e", + 
"sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f", + "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484", + "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf", + "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74", + "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8", + "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14", + "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86", + "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359", + "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b", + "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4", + "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9", + "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb", + "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209", + "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb" ], "index": "pypi", - "markers": "python_version < '3.12' and python_version >= '3.8'", - "version": "==1.10.1" + "markers": "python_version >= '3.10'", + "version": "==1.14.0" }, "seaborn": { "hashes": [ @@ -3617,11 +3696,11 @@ }, "setuptools": { "hashes": [ - "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4", - "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0" + "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5", + "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc" ], "markers": "python_version >= '3.8'", - "version": "==70.0.0" + "version": "==70.3.0" }, "shapely": { "hashes": [ @@ -3795,6 +3874,13 @@ ], "version": "==1.4.7" }, + "sortedcontainers": { + "hashes": [ + "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", + "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0" + ], + "version": "==2.4.0" + }, "soupsieve": { "hashes": [ "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690", @@ -3834,6 +3920,14 @@ "markers": "python_version >= '3.8'", "version": "==3.8.0" }, + "tblib": { + "hashes": [ + "sha256:80a6c77e59b55e83911e1e607c649836a69c103963c5f28a46cbeef44acf8129", + "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" + }, "terminado": { "hashes": [ "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", @@ -3849,6 +3943,14 @@ ], "version": "==1.7.0" }, + "threadpoolctl": { + "hashes": [ + "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107", + "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467" + ], + "markers": "python_version >= '3.8'", + "version": "==3.5.0" + }, "tinycss2": { "hashes": [ "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d", @@ -3955,20 +4057,20 @@ }, "urllib3": { "hashes": [ - "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", - "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0" + "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3", + "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429" ], "index": "pypi", "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.18" + "version": "==1.26.19" }, "virtualenv": { "hashes": [ - 
"sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c", - "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b" + "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a", + "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589" ], "markers": "python_version >= '3.7'", - "version": "==20.26.2" + "version": "==20.26.3" }, "wcwidth": { "hashes": [ @@ -4093,20 +4195,28 @@ }, "xarray": { "hashes": [ - "sha256:7bee552751ff1b29dab8b7715726e5ecb56691ac54593cf4881dff41978ce0cd", - "sha256:7e530b1deafdd43e5c2b577d0944e6b528fbe88045fd849e49a8d11871ecd522" + "sha256:0b91e0bc4dc0296947947640fe31ec6e867ce258d2f7cbc10bedf4a6d68340c7", + "sha256:721a7394e8ec3d592b2d8ebe21eed074ac077dc1bb1bd777ce00e41700b4866c" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2023.1.0" + "markers": "python_version >= '3.9'", + "version": "==2024.6.0" + }, + "xhistogram": { + "hashes": [ + "sha256:56b0751e1469eaed81710f644c8ba5c574b51883baa2feee26a95f2f708f91a1", + "sha256:ad55330d55296d273b3370678223fde0f50085e04cb744c7b3b0bb7702a2c6bf" + ], + "markers": "python_version >= '3.7'", + "version": "==0.3.2" }, - "xarray-spatial": { + "xskillscore": { "hashes": [ - "sha256:8a356ac66a61ff9522453194db5d184e1eccee8ab890c7ec723bb48c6eafd392", - "sha256:fd7e3b447236f6e8f8c5110684a7ca50f97149c806f56ebf4ed1eeef1dd681ce" + "sha256:39c3ffa75e2cbbde87470ad957d667e729025ac6a6077ca8efce0ca5f69bfafa", + "sha256:ce3306c60626eafea722a1522016e272ca516ce6f2447c75f92c52888939f8c2" ], "markers": "python_version >= '3.7'", - "version": "==0.3.5" + "version": "==0.0.24" }, "xyzservices": { "hashes": [ @@ -4308,6 +4418,14 @@ "markers": "python_version >= '3.9'", "version": "==2.18.0" }, + "zict": { + "hashes": [ + "sha256:5796e36bd0e0cc8cf0fbc1ace6a68912611c1dbd74750a3f3026b9b9d6a327ae", + "sha256:e321e263b6a97aafc0790c3cfb3c04656b7066e6738c37fffcca95d803c9fba5" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" + }, "zipp": { "hashes": [ "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19", @@ -4335,11 +4453,11 @@ }, "exceptiongroup": { "hashes": [ - "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad", - "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16" + "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", + "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" ], "markers": "python_version < '3.11'", - "version": "==1.2.1" + "version": "==1.2.2" }, "executing": { "hashes": [ @@ -4351,11 +4469,11 @@ }, "ipython": { "hashes": [ - "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab", - "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716" + "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c", + "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff" ], "markers": "python_version >= '3.10'", - "version": "==8.25.0" + "version": "==8.26.0" }, "jedi": { "hashes": [ diff --git a/config/aws_s3_put_fim3_hydrovis_whitelist.lst b/config/aws_s3_put_fim3_hydrovis_whitelist.lst deleted file mode 100644 index 5d8ed5ca7..000000000 --- a/config/aws_s3_put_fim3_hydrovis_whitelist.lst +++ /dev/null @@ -1,4 +0,0 @@ -hydroTable{}.csv -gw_catchments_reaches_filtered_addedAttributes{}.tif -rem_zeroed_masked{}.tif -usgs_elev_table.csv diff --git a/config/deny_branch_zero.lst b/config/deny_branch_zero.lst index f4bf2cba9..409c306ff 100644 --- 
a/config/deny_branch_zero.lst
+++ b/config/deny_branch_zero.lst
@@ -56,7 +56,7 @@ slopes_d8_dem_meters_{}.tif
 slopes_d8_dem_meters_masked_{}.tif
 sn_catchments_reaches_{}.tif
 src_{}.json
-src_base_{}.csv
+#src_base_{}.csv
 #src_full_crosswalked_{}.csv
 stage_{}.txt
 streamOrder_{}.tif
diff --git a/config/deny_branches.lst b/config/deny_branches.lst
index 07ef1f7f4..a44a79a49 100644
--- a/config/deny_branches.lst
+++ b/config/deny_branches.lst
@@ -58,7 +58,7 @@ slopes_d8_dem_meters_{}.tif
 slopes_d8_dem_meters_masked_{}.tif
 sn_catchments_reaches_{}.tif
 src_{}.json
-src_base_{}.csv
+# src_base_{}.csv
 # src_full_crosswalked_{}.csv
 stage_{}.txt
 streamOrder_{}.tif
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 027fdc448..f4b3d68bf 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,6 +1,112 @@
All notable changes to this project will be documented in this file. We follow the [Semantic Versioning 2.0.0](http://semver.org/) format.
+
+## v4.5.3.1 - 2024-07-24 - [PR#1233](https://github.com/NOAA-OWP/inundation-mapping/pull/1233)

PR [1217](https://github.com/NOAA-OWP/inundation-mapping/pull/1217), which is about to be merged, updates a number of Python packages, one of which is numpy. The numpy upgrade triggered a very large amount of on-screen output from a new numpy warning while running `synthesize_test_cases.py`; this PR addresses that warning output.

### Changes
- `tools/overlapping_inundation.py`: As described above

+

+ + +


## v4.5.3.0 - 2024-07-24 - [PR#1217](https://github.com/NOAA-OWP/inundation-mapping/pull/1217)

This PR rolls up a number of other PRs and Python package requests, including:
- Issue [1208](https://github.com/NOAA-OWP/inundation-mapping/issues/1208) Bump OpenJDK from 17.0.8 to 17.0.10 (via updating to JDK 21.0.3)
- PR [1207](https://github.com/NOAA-OWP/inundation-mapping/pull/1207) - Dependabot bump certifi from 2023.7.22 to 2024.7.4
- PR [1192](https://github.com/NOAA-OWP/inundation-mapping/pull/1192) - Dependabot bump urllib3 from 1.26.18 to 1.26.19
- Updates required from ongoing PR [1206](https://github.com/NOAA-OWP/inundation-mapping/pull/1206) - Probabilistic Flood Inundation Mapping. These updates make it easier for that branch/task to continue forward and stay in sync with dev. This triggered a few other packages that needed to be updated.

Other tasks included are:
- Removing the now heavily obsolete `unit_test` system, including the package `pytest`. This included some changes to the `CONTRIBUTING.md` document.
- Cleanup of a couple of packages no longer in use: `pluggy` and `iniconfig`
- Removal of a deprecated file named `config/aws_s3_put_fim3_hydrovis_whitelist.lst`
- Removed duration stamps around a few parts in `fim_post_processing.sh`
- Fixes and updates to linting files, e.g. `pyproject.toml` (line length was not working correctly)

### Changes
- `Dockerfile`, `Pipfile`, `Pipfile.lock`: as described above for Python package changes
- `.gitignore`, `CONTRIBUTING.md`: File changes related to removing the `unit_test` system.
- `fim_post_processing.sh`: noted above.
- `pyproject.toml`: fixes and updates for linting

### Removals
- `unit_tests` folder and all related files under it. Approximately 25 to 30 files removed.

+

+ + +


## v4.5.2.11 - 2024-07-19 - [PR#1222](https://github.com/NOAA-OWP/inundation-mapping/pull/1222)

We have been having problems with the overall duration of post-processing taking a long time. This new system captures duration times for each module/section inside `fim_post_processing.sh` and records them to a file in the output directory. It records durations as it progresses, which will also help us learn where `fim_post_processing.sh` stopped along the way if it fails.

Note: When used in code, we call the `Set_log_file_path` shell function with a file name and path (no validation is done at this time). Then, each time a person wants to print to both screen and log file, they use the `l_echo` function instead of the native `echo` command. If the log file path has not been set, the output will continue to go to the screen, just not to a log file. A usage sketch follows the changes list below.

### Changes
- `fim_pipeline.sh`: A couple of minor text output changes.
- `fim_post_processing.sh`: As described above.
- `src/bash_functions.env`: New functions and adjustments to support the new log system.

+
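A minimal usage sketch of the new logging functions (the log path here is illustrative only; it assumes `src/bash_functions.env` has been sourced):

```
source src/bash_functions.env

# Route all l_echo output to both the screen and this log file
Set_log_file_path /outputs/my_run/post_proc.log

l_echo "This line goes to the screen and to post_proc.log"

# Unset the log path; l_echo now behaves like a plain echo
Clear_log_file_path
l_echo "This line goes to the screen only"
```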

+ + +


## v4.5.2.10 - 2024-07-19 - [PR#1224](https://github.com/NOAA-OWP/inundation-mapping/pull/1224)

Addresses warnings in order to reduce output messages.

### Changes

- `src/`
  - `adjust_thalweg_lateral.py`: fixes a number type
  - `delineate_hydros_and_produce_HAND.sh`: removes a division-by-zero warning
  - `getRasterInfoNative.py`: adds `gdal.UseExceptions()`

+

+ + +


## v4.5.2.9 - 2024-07-19 - [PR#1216](https://github.com/NOAA-OWP/inundation-mapping/pull/1216)

Adds `NO_VALID_CROSSWALKS` to `FIM_exit_codes`, which is used when the crosswalk table or output_catchments DataFrame is empty. Removes branches that fail with `NO_VALID_CROSSWALKS`.

### Changes
 - `add_crosswalk.py`: Added `NO_VALID_CROSSWALKS` as the exit status when crosswalk or output_catchments is empty
 - `process_branch.sh`: Removed branches that fail with `NO_VALID_CROSSWALKS`
 - `utils/fim_enums.py`: Added `NO_VALID_CROSSWALKS` to `FIM_exit_codes`

+

+ + +


## v4.5.2.8 - 2024-07-19 - [PR#1219](https://github.com/NOAA-OWP/inundation-mapping/pull/1219)

Changes non-fatal `ERROR` messages to `WARNING` so that they are not picked up as errors when the logs are scanned.

### Changes

- `src/`
  - `bathymetric_adjustment.py`: Changes `WARNING` to `ERROR` in Exception
  - `src_roughness_optimization.py`: Changes `ERROR` messages to `WARNING`

+

+ + +

## v4.5.2.7 - 2024-07-19 - [PR#1220](https://github.com/NOAA-OWP/inundation-mapping/pull/1220)

With this PR we can run `fim_post_processing.sh` multiple times on a processed batch without any concern that it may change the hydroTable or src_full_crosswalked files; see the usage sketch after the changes list below.

### Additions

- `src/update_htable_src.py`

### Changes

- `config/deny_branch_zero.lst`
- `config/deny_branches.lst`
- `fim_post_processing.sh`

+
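A quick sketch of the rerun behavior (the run name and job count are illustrative; `-n`/`-j` are the same arguments `fim_pipeline.sh` passes through):

```
# First run: the attempt counter file is created and the reset step is skipped
./fim_post_processing.sh -n my_run -j 6

# Rerun on the same outputs: the counter is now greater than 1, so
# update_htable_src.py first restores hydroTable and src_full_crosswalked
# values to their defaults before recalibrating
./fim_post_processing.sh -n my_run -j 6
```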

+ ## v4.5.2.6 - 2024-07-12 - [PR#1184](https://github.com/NOAA-OWP/inundation-mapping/pull/1184) This PR adds a new script to determine which bridges are inundated by a specific flow. It will assign a risk status to each bridge point based on a specific threshold. @@ -11,7 +117,6 @@ This PR adds a new script to determine which bridges are inundated by a specific

- ## v4.5.2.5 - 2024-07-08 - [PR#1205](https://github.com/NOAA-OWP/inundation-mapping/pull/1205) Snaps crosswalk from the midpoint of DEM-derived reaches to the nearest point on NWM streams within a threshold of 100 meters. DEM-derived streams that do not locate any NWM streams within 100 meters of their midpoints are removed from the FIM hydrofabric and their catchments are not inundated. @@ -22,7 +127,6 @@ Snaps crosswalk from the midpoint of DEM-derived reaches to the nearest point on

- 
## v4.5.2.4 - 2024-07-08 - [PR#1204](https://github.com/NOAA-OWP/inundation-mapping/pull/1204)

Bug fix for extending outlets in order to ensure proper flow direction in depression filling algorithm. This PR adds a distance criteria that in order for the end of an outlet stream to be snapped to the wbd_buffered boundary, the end point must be less than 100 meters from the WBD boundary.
diff --git a/fim_pipeline.sh b/fim_pipeline.sh
index 8fc4ae9d7..16139fdc7 100755
--- a/fim_pipeline.sh
+++ b/fim_pipeline.sh
@@ -116,6 +116,7 @@ echo
 echo "---- Unit (HUC) processing is complete"
 date -u
 Calc_Duration $pipeline_start_time
+echo "---------------------------------------------------"

 ## POST PROCESSING
@@ -126,8 +127,10 @@ rm -d $workDir/$runName
 . $projectDir/fim_post_processing.sh -n $runName -j $jobMaxLimit

 echo
-echo "======================== End of fim_pipeline.sh =========================="
+
+echo "======================== End of fim_pipeline for $runName =========="
 date -u
+echo "Total Duration is ..."
 Calc_Duration $pipeline_start_time
 echo
diff --git a/fim_post_processing.sh b/fim_post_processing.sh
index 2a7965efb..e17850cb9 100755
--- a/fim_post_processing.sh
+++ b/fim_post_processing.sh
@@ -73,6 +73,7 @@ if [ "$jobLimit" = "" ]; then jobLimit=1; fi
 rm -rdf $outputDestDir/logs/src_optimization
 rm -f $outputDestDir/logs/log_bankfull_indentify.log
 rm -f $outputDestDir/logs/subdiv_src_.log
+rm -f $log_file_name

 # load up enviromental information
 args_file=$outputDestDir/runtime_args.env
@@ -83,33 +84,85 @@
 source $outputDestDir/params.env
 source $srcDir/bash_functions.env
 source $srcDir/bash_variables.env

-echo
+# Tell the system the name and location of the post processing log
+log_file_name=$outputDestDir/post_proc.log
+Set_log_file_path $log_file_name
+
+l_echo ""
 echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
-echo "---- Start of fim_post_processing"
-echo "---- Started: `date -u`"
+l_echo "---- Start of fim_post_processing"
+l_echo "---- Started: `date -u`"
 T_total_start
 post_proc_start_time=`date +%s`

+## RUN UPDATE HYDROTABLE AND SRC ##
+# Define the counter file
+
+Tstart
+COUNTER_FILE="${outputDestDir}/post_processing_attempt.txt"
+# Function to clean up
+cleanup() {
+    if [ "$SUCCESS" = true ]; then
+        if [ -f "$COUNTER_FILE" ]; then
+            COUNTER=$(cat "$COUNTER_FILE")
+            if [ "$COUNTER" -eq 1 ]; then
+                l_echo "Counter is 1. Removing the counter file."
+                rm "$COUNTER_FILE"
+            fi
+        fi
+    fi
+}
+
+# Set up trap to call cleanup on EXIT, ERR, and INT (interrupt signal)
+trap cleanup EXIT ERR INT
+# Initialize the counter file if it doesn't exist
+if [ ! -f "$COUNTER_FILE" ]; then
+    echo 0 > "$COUNTER_FILE"
+fi
+
+# Read the current counter value
+COUNTER=$(cat "$COUNTER_FILE")
+
+# Increment the counter
+COUNTER=$((COUNTER + 1))
+
+# Save the new counter value (plain echo: this writes the counter file and should not go to the log)
+echo "$COUNTER" > "$COUNTER_FILE"
+
+# Check if the counter is greater than one
+if [ "$COUNTER" -gt 1 ]; then
+    # Execute the Python file
+    l_echo "Updating hydroTable & src_full_crosswalked for branches"
+    python3 $srcDir/update_htable_src.py -d $outputDestDir
+else
+    l_echo "Execution count is $COUNTER, not executing the update_htable_src.py file."
+fi +Tcount + ## AGGREGATE BRANCH LISTS INTO ONE ## -echo -e $startDiv"Start branch aggregation" +l_echo $startDiv"Start branch aggregation" +Tstart python3 $srcDir/aggregate_branch_lists.py -d $outputDestDir -f "branch_ids.csv" -o $fim_inputs +Tcount ## GET NON ZERO EXIT CODES FOR BRANCHES ## -echo -e $startDiv"Start non-zero exit code checking" +l_echo $startDiv"Start non-zero exit code checking" find $outputDestDir/logs/branch -name "*_branch_*.log" -type f | \ xargs grep -E "Exit status: ([1-9][0-9]{0,2})" > \ "$outputDestDir/branch_errors/non_zero_exit_codes.log" & ## RUN AGGREGATE BRANCH ELEV TABLES ## -echo "Processing usgs & ras2fim elev table aggregation" +l_echo $startDiv"Processing usgs & ras2fim elev table aggregation" +Tstart python3 $srcDir/aggregate_by_huc.py -fim $outputDestDir -i $fim_inputs -elev -ras -j $jobLimit +Tcount ## RUN BATHYMETRY ADJUSTMENT ROUTINE ## if [ "$bathymetry_adjust" = "True" ]; then - echo -e $startDiv"Performing Bathymetry Adjustment routine" - # Run bathymetry adjustment routine + l_echo $startDiv"Performing Bathymetry Adjustment routine" Tstart + # Run bathymetry adjustment routine python3 $srcDir/bathymetric_adjustment.py \ -fim_dir $outputDestDir \ -bathy $bathymetry_file \ @@ -121,9 +174,9 @@ fi ## RUN SYNTHETIC RATING CURVE BANKFULL ESTIMATION ROUTINE ## if [ "$src_bankfull_toggle" = "True" ]; then - echo -e $startDiv"Estimating bankfull stage in SRCs" - # Run SRC bankfull estimation routine routine + l_echo $startDiv"Estimating bankfull stage in SRCs" Tstart + # Run SRC bankfull estimation routine routine python3 $srcDir/identify_src_bankfull.py \ -fim_dir $outputDestDir \ -flows $bankfull_flows_file \ @@ -133,7 +186,7 @@ fi ## RUN SYNTHETIC RATING SUBDIVISION ROUTINE ## if [ "$src_subdiv_toggle" = "True" ] && [ "$src_bankfull_toggle" = "True" ]; then - echo -e $startDiv"Performing SRC channel/overbank subdivision routine" + l_echo $startDiv"Performing SRC channel/overbank subdivision routine" # Run SRC Subdivision & Variable Roughness routine Tstart python3 $srcDir/subdiv_chan_obank_src.py \ @@ -146,8 +199,7 @@ fi ## RUN SYNTHETIC RATING CURVE CALIBRATION W/ USGS GAGE RATING CURVES ## if [ "$src_adjust_usgs" = "True" ] && [ "$src_subdiv_toggle" = "True" ] && [ "$skipcal" = "0" ]; then Tstart - echo - echo -e $startDiv"Performing SRC adjustments using USGS rating curve database" + l_echo $startDiv"Performing SRC adjustments using USGS rating curve database" # Run SRC Optimization routine using USGS rating curve data (WSE and flow @ NWM recur flow values) python3 $srcDir/src_adjust_usgs_rating_trace.py \ -run_dir $outputDestDir \ @@ -161,8 +213,7 @@ fi ## RUN SYNTHETIC RATING CURVE CALIBRATION W/ RAS2FIM CROSS SECTION RATING CURVES ## if [ "$src_adjust_ras2fim" = "True" ] && [ "$src_subdiv_toggle" = "True" ] && [ "$skipcal" = "0" ]; then Tstart - echo - echo -e $startDiv"Performing SRC adjustments using ras2fim rating curve database" + l_echo $startDiv"Performing SRC adjustments using ras2fim rating curve database" # Run SRC Optimization routine using ras2fim rating curve data (WSE and flow @ NWM recur flow values) python3 $srcDir/src_adjust_ras2fim_rating.py \ -run_dir $outputDestDir \ @@ -176,16 +227,14 @@ fi ## RUN SYNTHETIC RATING CURVE CALIBRATION W/ BENCHMARK POINTS (.parquet files) ## if [ "$src_adjust_spatial" = "True" ] && [ "$src_subdiv_toggle" = "True" ] && [ "$skipcal" = "0" ]; then Tstart - echo - echo -e $startDiv"Performing SRC adjustments using benchmark point .parquet files" + l_echo $startDiv"Performing SRC adjustments using 
benchmark point .parquet files"
     python3 $srcDir/src_adjust_spatial_obs.py -fim_dir $outputDestDir -j $jobLimit
     Tcount
     date -u
 fi

 ## AGGREGATE BRANCH TABLES ##
-echo
-echo -e $startDiv"Aggregating branch hydrotables"
+l_echo $startDiv"Aggregating branch hydrotables"
 Tstart
 python3 $srcDir/aggregate_by_huc.py \
     -fim $outputDestDir \
@@ -198,8 +247,7 @@ date -u

 ## PERFORM MANUAL CALIBRATION
 if [ "$manual_calb_toggle" = "True" ] && [ -f $man_calb_file ]; then
-    echo
-    echo -e $startDiv"Performing manual calibration"
+    l_echo $startDiv"Performing manual calibration"
     Tstart
     python3 $srcDir/src_manual_calibration.py \
         -fim_dir $outputDestDir \
@@ -208,9 +256,8 @@ if [ "$manual_calb_toggle" = "True" ] && [ -f $man_calb_file ]; then
     date -u
 fi

-echo
-echo -e $startDiv"Combining crosswalk tables"
-# aggregate outputs
+
+l_echo $startDiv"Combining crosswalk tables"
 Tstart
 python3 $toolsDir/combine_crosswalk_tables.py \
     -d $outputDestDir \
@@ -218,25 +265,30 @@ python3 $toolsDir/combine_crosswalk_tables.py \
 Tcount
 date -u

-echo -e $startDiv"Resetting Permissions"
+
+l_echo $startDiv"Resetting Permissions"
 Tstart
     find $outputDestDir -type d -exec chmod -R 777 {} +
     find $outputDestDir -type f -exec chmod 777 {} +  # just root level files
 Tcount
-date -u

-echo
-echo -e $startDiv"Scanning logs for errors. Results be saved in root not inside the log folder."
+
+l_echo $startDiv"Scanning logs for errors and warnings. This can take quite a few minutes so stand by."
+echo "Results will be saved in root, not inside the log folder."
 Tstart
 # grep -H -r -i -n "error" $outputDestDir/logs/ > $outputDestDir/all_errors_from_logs.log
-    find $outputDestDir -type f | grep -H -r -i -n "error" $outputDestDir/logs/ > $outputDestDir/all_errors_from_logs.log
-Tcount
-date -u
+    grep -H -r -i -n "error" $outputDestDir/logs/ > \
+        $outputDestDir/all_errors_from_logs.log &
+    l_echo "error scan launched, now launching the warning scan"
+    grep -H -r -i -n "warning" $outputDestDir/logs/ > \
+        $outputDestDir/all_warnings_from_logs.log &
+    l_echo "warning scan launched"
+Tcount

 echo
-echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
-echo "---- End of fim_post_processing"
-echo "---- Ended: `date -u`"
+l_echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
+l_echo "---- End of fim_post_processing"
+l_echo "---- Ended: `date -u`"
 Calc_Duration $post_proc_start_time
 echo
diff --git a/fim_pre_processing.sh b/fim_pre_processing.sh
index 0846b522e..d89c554ce 100755
--- a/fim_pre_processing.sh
+++ b/fim_pre_processing.sh
@@ -122,6 +122,7 @@ fi
 # outputsDir & workDir come from the Dockerfile
 outputDestDir=$outputsDir/$runName
 tempRunDir=$workDir/$runName
+export WBT_PATH=${tempRunDir}/whitebox_temp

 # default values
 if [ "$envFile" = "" ]; then envFile=/$projectDir/config/params_template.env; fi
@@ -214,6 +215,7 @@ else
     rm -f $outputDestDir/*.env
 fi

+
 mkdir -p $outputDestDir/logs/unit
 mkdir -p $outputDestDir/logs/branch
 mkdir -p $outputDestDir/unit_errors
diff --git a/pyproject.toml b/pyproject.toml
index ac0432b03..ee3862fda 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,17 +15,13 @@ maintainers = [
     {name = "Fernando Salas", email = "fernando.salas@noaa.gov"},
     {name = "Carson Pruitt", email = "carson.pruitt@noaa.gov"},
     {name = "Ali Forghani", email = "ali.forghani@noaa.gov"},
-    {name = "Bradford Bates", email = "bradford.bates@noaa.gov"},
     {name = "Emily Deardorff", email = "emily.deardorff@noaa.gov"},
-    {name = "Fernando
Aristizabal", email = "fernando.aristizabal@noaa.gov"}, {name = "Greg Petrochenkov", email = "greg.petrochenkov@noaa.gov"}, {name = "Hamideh Safa", email = "hamideh.safa@noaa.gov"}, {name = "James Coll", email = "james.coll@noaa.gov"}, {name = "Matt Luck", email = "matt.luck@noaa.gov"}, - {name = "Laura Keys", email = "laura.keys@noaa.gov"}, {name = "Nick Chadwick", email = "nick.chadwick@noaa.gov"}, {name = "Riley McDermott", email = "riley.mcdermott@noaa.gov"}, - {name = "Rob Gonzalez-Pita", email = "rob.g.pita@noaa.gov"}, {name = "Robert Hanna", email = "robert.hanna@noaa.gov"}, {name = "Ryan Spies", email = "ryan.spies@noaa.gov"} ] @@ -51,14 +47,14 @@ skip-magic-trailing-comma = true [tool.isort] profile = 'black' multi_line_output = 3 -line_length = 110 # It should be the same as `tool.black.line-length` above +line_length = 110 lines_after_imports = 2 [tool.flake8] count = true doctests = true max-complexity = 56 -max-line-length = 110 # It should be the same as `tool.black.line-length` above +max-line-length = 110 extend-ignore = """ E203, E266, diff --git a/src/add_crosswalk.py b/src/add_crosswalk.py index 62789c9b3..61622b2e3 100755 --- a/src/add_crosswalk.py +++ b/src/add_crosswalk.py @@ -9,6 +9,7 @@ from numpy import unique from rasterstats import zonal_stats +from utils.fim_enums import FIM_exit_codes from utils.shared_functions import getDriver from utils.shared_variables import FIM_ID @@ -87,14 +88,18 @@ def add_crosswalk( crosswalk = crosswalk.filter(items=['HydroID', 'feature_id', 'distance']) crosswalk = crosswalk.merge(input_nwmflows[['order_']], on='feature_id') - if len(crosswalk) < 1: + if crosswalk.empty: print("No relevant streams within HUC boundaries.") - sys.exit(0) + sys.exit(FIM_exit_codes.NO_VALID_CROSSWALKS.value) if input_catchments.HydroID.dtype != 'int': input_catchments.HydroID = input_catchments.HydroID.astype(int) output_catchments = input_catchments.merge(crosswalk, on='HydroID') + if output_catchments.empty: + print("No valid catchments remain.") + sys.exit(FIM_exit_codes.NO_VALID_CROSSWALKS.value) + if input_flows.HydroID.dtype != 'int': input_flows.HydroID = input_flows.HydroID.astype(int) output_flows = input_flows.merge(crosswalk, on='HydroID') diff --git a/src/adjust_thalweg_lateral.py b/src/adjust_thalweg_lateral.py index 5a55b61fc..d14582e22 100755 --- a/src/adjust_thalweg_lateral.py +++ b/src/adjust_thalweg_lateral.py @@ -24,7 +24,7 @@ def make_zone_min_dict(elevation_window, zone_min_dict, zone_window, cost_window for i, elev_m in enumerate(zone_window): # If the zone really exists in the dictionary, compare elevation values. i = int(i) - elev_m = int(elev_m) + elev_m = types.int32(elev_m) if cost_window[i] <= cost_tolerance: if elevation_window[i] > 0: # Don't allow bad elevation values @@ -47,7 +47,7 @@ def minimize_thalweg_elevation(dem_window, zone_min_dict, zone_window, thalweg_w for i, elev_m in enumerate(zone_window): i = int(i) - elev_m = int(elev_m) + elev_m = types.int32(elev_m) thalweg_cell = thalweg_window[i] # From flows_grid_boolean.tif (0s and 1s) if thalweg_cell == 1: # Make sure thalweg cells are checked. if elev_m in zone_min_dict: diff --git a/src/bash_functions.env b/src/bash_functions.env index 72a33f450..e8452003a 100644 --- a/src/bash_functions.env +++ b/src/bash_functions.env @@ -1,5 +1,44 @@ #!/bin/bash + +## Simple logging system + +# If this value -nq "", then functions in this file will be printed to +# console and log file. 
+log_file_path="" + +# If this is set, all functions will set to a file as well as screen +# Yes. it needs some validation that the log_file_path is a valid +# file name and path. +Set_log_file_path () { + log_file_path=$1 + + if [ ! -f $log_file_path ]; then + touch $log_file_path + else + rm -f $log_file_path + fi +} + +# to stop other calls to make log calls to this all +Clear_log_file_path () { + + log_file_path="" +} + + +# if the log_file_path has been set, this will print to screen and add to log file +l_echo () { + local msg=$1 + + if [[ $log_file_path != "" ]]; then + echo -e $msg ; echo -e $msg >> $log_file_path + else + echo -e $msg + fi +} + + ## Timing functions T_total_start () { @@ -10,15 +49,25 @@ Tstart () { t1=`date +%s` } +# This one is based on the value of t1 created earlier, and end is now Tcount () { t2=`date +%s` local total_sec=$(( $t2 - $t1)) local dur_min=$((total_sec / 60)) local dur_remainder_sec=$((total_sec % 60)) - echo "Cumulative Time = $dur_min min(s) and $dur_remainder_sec sec" + + local msg="Duration = $dur_min min(s) and $dur_remainder_sec sec" + # if an arg comes in and is a file path, we can display and record to a file + if [[ $log_file_path != "" ]]; then + echo -e $msg ; echo -e $msg >> $log_file_path + else + echo -e $msg + fi + } +# This expects a start time submitted as an arg and assumes the end time as now Calc_Duration() { local start_time=$1 local end_time=`date +%s` @@ -26,9 +75,17 @@ Calc_Duration() { local total_sec=$(( $end_time - $start_time )) local dur_min=$((total_sec / 60)) local dur_remainder_sec=$((total_sec % 60)) - echo "Duration = $dur_min min(s) and $dur_remainder_sec sec" + + local msg="Duration : $dur_min min(s) and $dur_remainder_sec sec" + # if an arg comes in and is a file path, we can display and record to a file + if [[ $log_file_path != "" ]]; then + echo -e $msg ; echo -e $msg >> $log_file_path + else + echo -e $msg + fi } + Calc_Time() { local start_time=$1 local end_time=`date +%s` @@ -41,6 +98,7 @@ Calc_Time() { else echo "$dur_min:$dur_remainder_sec" fi + } Calc_Time_Minutes_in_Percent(){ diff --git a/src/bathymetric_adjustment.py b/src/bathymetric_adjustment.py old mode 100644 new mode 100755 index 9c114173b..6e1fe687c --- a/src/bathymetric_adjustment.py +++ b/src/bathymetric_adjustment.py @@ -11,7 +11,8 @@ import geopandas as gpd import pandas as pd -from synthesize_test_cases import progress_bar_handler + +from utils.shared_functions import progress_bar_handler def correct_rating_for_bathymetry(fim_dir, huc, bathy_file, verbose): @@ -191,13 +192,13 @@ def multi_process_hucs(fim_dir, bathy_file, wbd_buffer, wbd, output_suffix, numb executor_dict[future] = huc # Send the executor to the progress bar and wait for all tasks to finish - progress_bar_handler(executor_dict, True, f"Running BARC on {len(hucs)} HUCs") + progress_bar_handler(executor_dict, f"Running BARC on {len(hucs)} HUCs") # Get the returned logs and write to the log file for future in executor_dict.keys(): try: log_file.write(future.result()) except Exception as ex: - print(f"WARNING: {executor_dict[future]} BARC failed for some reason") + print(f"ERROR: {executor_dict[future]} BARC failed for some reason") log_file.write(f"ERROR --> {executor_dict[future]} BARC failed (details: *** {ex} )\n") traceback.print_exc(file=log_file) diff --git a/src/delineate_hydros_and_produce_HAND.sh b/src/delineate_hydros_and_produce_HAND.sh index 77b64b7f5..2c827cc57 100755 --- a/src/delineate_hydros_and_produce_HAND.sh +++ b/src/delineate_hydros_and_produce_HAND.sh 
@@ -62,7 +62,7 @@ echo -e $startDiv"Mask Burned DEM for Thalweg Only $hucNumber $current_branch_id gdal_calc.py --quiet --type=Int32 --overwrite --co "COMPRESS=LZW" --co "BIGTIFF=YES" --co "TILED=YES" \ -A $tempCurrentBranchDataDir/flowdir_d8_burned_filled_$current_branch_id.tif \ -B $tempCurrentBranchDataDir/demDerived_streamPixels_$current_branch_id.tif \ - --calc="A/B" \ + --calc="A*B" \ --outfile="$tempCurrentBranchDataDir/flowdir_d8_burned_filled_flows_$current_branch_id.tif" \ --NoDataValue=0 diff --git a/src/getRasterInfoNative.py b/src/getRasterInfoNative.py index e3b8a69dc..2ebc5f8c8 100755 --- a/src/getRasterInfoNative.py +++ b/src/getRasterInfoNative.py @@ -9,6 +9,9 @@ from osgeo import gdal, osr +gdal.UseExceptions() + + """ read fsize ncols nrows ndv xmin ymin xmax ymax cellsize_resx cellsize_resy <<< $(./getRasterInfoNative.py ) diff --git a/src/process_branch.sh b/src/process_branch.sh index edfa2aa8e..7a85a5627 100755 --- a/src/process_branch.sh +++ b/src/process_branch.sh @@ -38,6 +38,11 @@ do err_exists=1 echo "***** Branch has no valid flowlines *****" rm -rf $tempHucDataDir/branches/$branchId/ + elif [ $code -eq 64 ]; then + echo + err_exists=1 + echo "***** Branch has no crosswalks *****" + rm -rf $tempHucDataDir/branches/$branchId/ elif [ $code -ne 0 ]; then echo err_exists=1 diff --git a/src/src_roughness_optimization.py b/src/src_roughness_optimization.py index 7f3864875..3f33e909c 100644 --- a/src/src_roughness_optimization.py +++ b/src/src_roughness_optimization.py @@ -128,7 +128,7 @@ def update_rating_curve( elif source_tag == 'ras2fim_rating': calb_type = 'calb_coef_ras2fim' else: - log_text += "ERROR - unknown calibration data source type: " + str(source_tag) + '\n' + log_text += "WARNING - unknown calibration data source type: " + str(source_tag) + '\n' ## Read in the hydroTable.csv and check wether it has previously been updated # (rename default columns if needed) @@ -202,7 +202,7 @@ def update_rating_curve( for index, row in df_nvalues.iterrows(): if row.hydroid not in df_htable['HydroID'].values: print( - 'ERROR: HydroID for calb point was not found in the hydrotable (check hydrotable) for HUC: ' + 'WARNING: HydroID for calb point was not found in the hydrotable (check hydrotable) for HUC: ' + str(huc) + ' branch id: ' + str(branch_id) @@ -210,7 +210,7 @@ def update_rating_curve( + str(row.hydroid) ) log_text += ( - 'ERROR: HydroID for calb point was not found in the hydrotable (check hydrotable) for HUC: ' + 'WARNING: HydroID for calb point was not found in the hydrotable (check hydrotable) for HUC: ' + str(huc) + ' branch id: ' + str(branch_id) @@ -224,7 +224,7 @@ def update_rating_curve( df_htable_hydroid = df_htable[(df_htable.HydroID == row.hydroid) & (df_htable.stage > 0)] if df_htable_hydroid.empty: print( - 'ERROR: df_htable_hydroid is empty but expected data: ' + 'WARNING: df_htable_hydroid is empty but expected data: ' + str(huc) + ' branch id: ' + str(branch_id) @@ -232,7 +232,7 @@ def update_rating_curve( + str(row.hydroid) ) log_text += ( - 'ERROR: df_htable_hydroid is empty but expected data: ' + 'WARNING: df_htable_hydroid is empty but expected data: ' + str(huc) + ' branch id: ' + str(branch_id) @@ -257,13 +257,13 @@ def update_rating_curve( if 'discharge_cms' not in df_nvalues: print( - 'ERROR: "discharge_cms" column does not exist in df_nvalues df: ' + 'WARNING: "discharge_cms" column does not exist in df_nvalues df: ' + str(huc) + ' branch id: ' + str(branch_id) ) log_text += ( - 'ERROR: "discharge_cms" column does not exist in 
df_nvalues df: ' + 'WARNING: "discharge_cms" column does not exist in df_nvalues df: ' + str(huc) + ' branch id: ' + str(branch_id) diff --git a/src/update_htable_src.py b/src/update_htable_src.py new file mode 100644 index 000000000..e2828afd6 --- /dev/null +++ b/src/update_htable_src.py @@ -0,0 +1,83 @@ +import argparse +import os +import re + +import geopandas as gpd +import pandas as pd + + +def process_branch(sub_branch_path, branch): + src_base_file = os.path.join(sub_branch_path, f'src_base_{branch}.csv') + hydro_table_file = os.path.join(sub_branch_path, f'hydroTable_{branch}.csv') + src_full_file = os.path.join(sub_branch_path, f'src_full_crosswalked_{branch}.csv') + input_flows_file = os.path.join( + sub_branch_path, f'demDerived_reaches_split_filtered_addedAttributes_crosswalked_{branch}.gpkg' + ) + + input_src_base = pd.read_csv(src_base_file, dtype=object) + input_src_full = pd.read_csv(src_full_file, dtype=object) + input_hydro_table = pd.read_csv(hydro_table_file, dtype=object) + input_flows = gpd.read_file(input_flows_file, engine="pyogrio", use_arrow=True) + + input_src_base = input_src_base.merge( + input_flows[['ManningN', 'HydroID', 'NextDownID', 'order_']], left_on='CatchId', right_on='HydroID' + ) + + # Update src_full + input_src_base = input_src_base.rename(columns=lambda x: x.strip(" ")) + input_src_base = input_src_base.apply(pd.to_numeric, **{'errors': 'coerce'}) + input_src_full['Volume (m3)'] = input_src_base['Volume (m3)'] + input_src_full['BedArea (m2)'] = input_src_base['BedArea (m2)'] + input_src_full['TopWidth (m)'] = input_src_base['SurfaceArea (m2)'] / input_src_base['LENGTHKM'] / 1000 + input_src_full['WettedPerimeter (m)'] = input_src_base['BedArea (m2)'] / input_src_base['LENGTHKM'] / 1000 + input_src_full['WetArea (m2)'] = input_src_base['Volume (m3)'] / input_src_base['LENGTHKM'] / 1000 + input_src_full['HydraulicRadius (m)'] = ( + input_src_full['WetArea (m2)'] / input_src_full['WettedPerimeter (m)'] + ) + input_src_full['HydraulicRadius (m)'].fillna(0, inplace=True) + input_src_full['Discharge (m3s-1)'] = ( + input_src_full['WetArea (m2)'] + * pow(input_src_full['HydraulicRadius (m)'], 2.0 / 3) + * pow(input_src_base['SLOPE'], 0.5) + / input_src_base['ManningN'] + ) + input_src_full['Bathymetry_source'] = pd.NA + input_src_full = input_src_full.iloc[:, :19] + + # Update hydroTable + input_hydro_table['subdiv_discharge_cms'] = pd.NA + input_hydro_table['discharge_cms'] = input_hydro_table['default_discharge_cms'] + + # Save updated files + input_src_full.to_csv(src_full_file, index=False) + input_hydro_table.to_csv(hydro_table_file, index=False) + + +def reset_hydro_and_src(fim_dir): + hucs = [h for h in os.listdir(fim_dir) if re.match(r'^\d{8}$', h)] + for huc_folder in hucs: + huc_path = os.path.join(fim_dir, huc_folder) + if os.path.isdir(huc_path): + for branch_folder in os.listdir(huc_path): + branch_path = os.path.join(huc_path, branch_folder) + if os.path.isdir(branch_path): + for branch in os.listdir(branch_path): + sub_branch_path = os.path.join(branch_path, branch) + if os.path.isdir(sub_branch_path): + process_branch(sub_branch_path, branch) + + +# Example usage: +# reset_hydro_and_src('/path/to/fim_dir') +if __name__ == '__main__': + ''' + Sample usage (min params): + python3 src/update_htable_src.py + -d /data/previous_fim/fim_4_5_2_0 + ''' + parser = argparse.ArgumentParser(description='Update hydrotable and src files.') + parser.add_argument('-d', '--fim_dir', help='Directory path for fim_pipeline output.', required=True) + + args = 
parser.parse_args() + + reset_hydro_and_src(args.fim_dir) diff --git a/src/utils/fim_enums.py b/src/utils/fim_enums.py index 7524e0277..cee14f63d 100644 --- a/src/utils/fim_enums.py +++ b/src/utils/fim_enums.py @@ -29,3 +29,4 @@ class FIM_exit_codes(Enum): NO_FLOWLINES_EXIST = 61 EXCESS_UNIT_ERRORS = 62 NO_BRANCH_LEVELPATHS_EXIST = 63 + NO_VALID_CROSSWALKS = 64 diff --git a/tools/overlapping_inundation.py b/tools/overlapping_inundation.py index 34c0e1dad..fbb1bcc50 100644 --- a/tools/overlapping_inundation.py +++ b/tools/overlapping_inundation.py @@ -141,10 +141,10 @@ def get_window_coords(self): # Get window sizes (both normal and edge windows) window_bounds1 = np.flip( np.array(np.meshgrid(window_width1, window_height1)).T.reshape(-1, 2), axis=1 - ).astype(np.int) + ).astype(np.int64) window_bounds2 = np.flip( np.array(np.meshgrid(window_width2, window_height2)).T.reshape(-1, 2), axis=1 - ).astype(np.int) + ).astype(np.int64) window_idx = np.array(np.unravel_index(np.arange(y_res * x_res), (y_res, x_res), order="F")) @@ -224,7 +224,7 @@ def read_rst_data(self, win_idx, datasets, path_points, bbox, meta): window = path_points[win_idx] window_height, window_width = np.array( [np.abs(bbox[win_idx][2] - bbox[win_idx][0]), np.abs(bbox[win_idx][3] - bbox[win_idx][1])] - ).astype(np.int) + ).astype(np.int64) bnds = [] data = [] @@ -396,7 +396,7 @@ def merge_data( :param rst_dims: dimensions of overlapping rasters """ - nan_tile = np.array([np.nan]).astype(dtype)[0] + nan_tile = np.array([np.nan]) window_data = np.tile(float(nan_tile), [int(final_window.height), int(final_window.width)]) for data, bnds, idx in zip(rst_data, window_bnds, datasets): diff --git a/unit_tests/README.md b/unit_tests/README.md deleted file mode 100644 index 6612b9761..000000000 --- a/unit_tests/README.md +++ /dev/null @@ -1,155 +0,0 @@ -# Inundation Mapping: Flood Inundation Mapping for U.S. National Water Model - -Flood inundation mapping software configured to work with the U.S. National Water Model operated and maintained by the National Oceanic and Atmospheric Administration (NOAA) National Water Center (NWC). - -#### For more information, see the [Inundation Mapping Wiki](https://github.com/NOAA-OWP/inundation-mapping/wiki). - -# The `/unit_tests` folder contains files for unit testing python files - -## Creating unit tests - -For each python code file that is being tested, unit tests should come in two files: a unit test file (based on the original python code file) and an accompanying json parameters file. - -The files should be named following FIM convention: - -{source py file name}_test.py -> `derive_level_paths_test.py` -{source py file name}_params.json -> `derive_level_paths_params.json` - - -## Tips to create a new json file for a new python unit test file. - -There are multiple ways to figure out a set of default json parameters for the new unit test file. - -One way is to use the incoming arg parser. Most python files include the code block of ` __name__ == '__main__':`, followed by external arg parsing (`args = vars(parser.parse_args())`). -* Add a `print(args)` or similar, and get all the values including keys as output. -* Copy that into an editor being used to create the json file. -* Add a line break after every comma. -* Find/replace all single quotes to double quotes, then clean up the left tab formatting. - - -## Setting up unit test data - -You can either pull the unit test data from S3, or generate it: - -1.) To pull the unit test data from ESIP's S3 bucket:
-```bash -aws s3 cp --recursive \ - s3://noaa-nws-owp-fim/hand_fim/unit_test_data/ \ - <your_local_folder>/unit_test_data/ \ - --no-sign-request -``` -The directory into which the data is copied must be named: `unit_test_data` - -2.) To generate the required unit test data, follow these steps: - -* Start a docker container as you normally would for any development. -```bash -docker run --rm -it --name <your_container_name> \ - -v /home/<your_name>/projects/<your_repo_folder>/:/foss_fim \ - {your docker image name} -``` -Example: -```bash -docker run --rm -it --name mytest \ - -v /home/abcd/projects/dev/inundation-mapping/:/foss_fim \ - -v /abcd_share/foss_fim/outputs/:/outputs \ - -v /abcd_share/foss_fim/:/data \ - -v /abcd_share/foss_fim/outputs_temp/:/fim_temp \ - fim_4:dev_20220208_8eba0ee -``` -* Call the `fim_pipeline.sh` script with the necessary arguments (`-n` must be `unit_test_data`). -```bash -fim_pipeline.sh -n unit_test_data -u "02020005 05030104" \ - -bd /foss_fim/config/deny_branch_unittests.lst \ - -ud None -j 1 -o -``` - -__NOTICE:__ fim_pipeline.sh uses a special deny file for unit tests: `deny_branch_unittests.lst`. - -* If you need to run inundation tests, run the following: - -```bash -python3 foss_fim/tools/synthesize_test_cases.py \ - -c DEV \ - -v unit_test_data \ - -jh 1 -jb 1 \ - -m /data/outputs/unit_test_data/alpha_test_metrics.csv \ - -o -``` - -## Running unit tests - -### If you'd like to test the whole unit test suite: -When inside the container, ensure you are within the root directory of the repository before running the `pytest` command. -``` -cd /foss_fim -pytest /foss_fim/unit_tests -``` - -__NOTE:__ This is subject to error, as the downloaded/generated data could potentially have a different path than what is specified in the `/unit_tests/*_params.json` files. The data files are not included in this repository, so are subject to change. - -### If you want to test just one unit test (from the root terminal window): - -```bash -cd /foss_fim -pytest unit_tests/gms/derive_level_paths_test.py - or -pytest unit_tests/inundate_gms_test.py -``` - -### If you'd like to run a particular test, you can, for example: -``` -pytest -v -s -k test_append_id_to_file_name_single_identifier_success -``` - -If one test case is chosen, pytest will scan all of the test files for the method (test case) specified. - -## Key Notes for creating new unit tests -1) All test functions must start with the phrase `test_`. That is how pytest picks them up. The rest of the function name does not have to match the pattern of `function_name_being_tested` but should. Further, the rest of the function name should say what the test is about, ie) `_failed_input_path`, ie) `test_{some_function_name_from_the_source_code_file}_failed_input_path`. It is fine that the function names get very long (common in the industry). - -2) If you are using this for development purposes, use caution when checking unit test files and json files back in. If you check them in, they still have to work for others and not just for a dev test you are doing. - -3) As of now, you cannot control the order that unit tests are run within a unit test file. - -4) There must be at least one associated `{original py file name}_params.json` file per unit test. - -5) There must be at least one "happy path (successful)" test inside the unittest file. ie) one function that is expected to fully pass. You can have multiple "happy path" tests if you want to change values that are fundamentally different, but fully expected to pass.
- -6) Json files can have multiple nodes, so the default "happy path/success" node is suggested to be called `valid_data`, if one does not already exist. Generally, the individual unit tests will call the `valid_data` node and override a local method value with invalid data. In semi-rare cases, you can add more nodes if you like, but try not to create new Json nodes for a few small field changes; generally, only use a new node if there are many major value changes (ie: majorly different test conditions). - -7) Unit test functions can and should test for all "outputs" from a source function. This includes the function's return value (if any), any global variables it might set, and even that saved output files (such as .tif files) have been created successfully. It is ok to have multiple validation checks (or asserts) in one unit test function. - -8) One Python file = one `{original py file name}_test.py` file. - -9) Sometimes you may want to run a full successful "happy path" version through `fim_pipeline.sh` (or similar), to get all of the files you need in place to do your testing. However, you will want to ensure that none of the outputs are being deleted during the test. One way to solve this is to put in an invalid value for the `-d` parameter (denylist). -ie: - ```bash - fim_pipeline.sh -n unit_test_data -u 05030104 \ - -c /foss_fim/config/params_template.env -j 1 -d /foss_fim/config/deny_unit_default.lst -o - ``` - - but ours would be: - - ```bash - fim_pipeline.sh -n unit_test_data -u 05030104 \ - -c /foss_fim/config/params_template.env -j 1 -d no_list -o - ``` - -## [Pytest](https://docs.pytest.org/en/7.2.x/) particulars - -The `unit_tests/pyproject.toml` file is used to specify which warnings are disabled for our particular unit tests. - -Luckily, `pytest` works well with the Python Standard Library's `unittest`. This made the migration of previous unit tests using `unittest` over to `pytest` quite simple. The caveat is that our current unit tests employ elements of both libraries. A full transition to `pytest` will ideally take place at a future date. - -## Testing for failing conditions -- Over time, you want to start adding functions that specifically look for fail conditions. This is a key part of unit test systems. It is not uncommon to have many dozens of test functions in one unit test file. Each "fail" type test must check for ONLY one variable value change. A "fail" test function should not fundamentally pass in an invalid huc AND an invalid file path. Those are two separate failing test conditions and must have two separate unit test functions. - -- It is possible for a unit test to have more than one failed value, but only if the values are tightly related and trigger just one failure (RARE though). Over time, we will accumulate many of these types of fail unit test functions, and they will take a while to run. - -- When you create a "fail" test function, you can load up the normal full "params" from the json file, but then override (hardcode) the one (or rarely more than one) variable inside the function. There is a way to "catch" a failure you are expecting, ensure it is the type of failure you expected, and have that expected "failure" count as a pass for the unit test. - -An example is in `unit_tests/Derive_level_paths_test.py` -> `test_Derive_level_paths_invalid_input_stream_network` (function). This example gives you the pattern implemented in Pytest; a minimal sketch of the same pattern follows below.
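For illustration, a minimal sketch of that fail-condition pattern; the module name `some_module`, the function `some_function`, and the `input_path` parameter are placeholders, not real project code:

```python
#!/usr/bin/env python3

import json

import pytest
from unit_tests_utils import FIM_unit_test_helpers as ut_helpers

import some_module as src  # placeholder for the module under test


def test_some_function_failed_input_path():
    # Load the shared "happy path" params, then override exactly ONE value
    # to set up the single failing condition this test is about.
    with open(ut_helpers.get_params_filename(__file__)) as params_file:
        params = json.load(params_file)["valid_data"].copy()

    params["input_path"] = "/some/bad/path"  # the one invalid value under test

    # The test passes only if the expected exception is actually raised.
    with pytest.raises(Exception):
        src.some_function(input_path=params["input_path"])
```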
- -## Future Enhancements -1) Full transition to the `pytest` library, removing classes of `unittest.TestCase` and taking full advantage of available code re-use patterns offered through `pytest`. - -2) Over time, it is expected that python files will be broken down to many functions inside the file. Currently, we tend to have one very large function in each python file which makes unit testing harder and less specific. Generally, one function will correlated to at least one "happy path" unit test function. Note: The files `derive_level_paths_test.py` and `clip_vectors_to_wbd_test.py` are not complete as they do not yet test all output from a method. diff --git a/unit_tests/__template.json b/unit_tests/__template.json deleted file mode 100644 index c7c190673..000000000 --- a/unit_tests/__template.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "valid_data": { - "some_param_name__suggested_to_match_the_method_param_name": "some value", - "another_one_and_as_many_lines_as_you_need": "some value", - "a_number": 20, - "a_list": [ - "value 1", - "value 2" - ], - "inline_json_comments_dont_work": "sorry" - } -} diff --git a/unit_tests/__template.py b/unit_tests/__template.py deleted file mode 100644 index 946e06474..000000000 --- a/unit_tests/__template.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import unittest - -import pytest -import Your_original_source_python_file_name as src -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - - -class test_Your_original_source_python_file_name(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - # MUST start with the name of "test_" - # This is the (or one of the) valid test expected to pass - def test_method_name_you_will_test(self): - """ - < UPDATE THESE NOTES: to say what you are testing and what you are expecting. - If there is no return value from the method, please say so.> - - Dev Notes: (which can be removed after you make this file) - Remember... You need to validate the method output if there is any. However, if you have time, it - is also recommended that you validate other outputs such as writing or updating file on the file - system, aka: Does the expected file exist. Don't worry about its contents. - """ - - # global params_file - params = self.params[ - # update "valid_data" value if you need to (aka.. more than one node) - "valid_data" - ].copy() - - # for now we are happy if no exceptions are thrown. - - # See the readme.md, clip_vectors_to_wbd_test.py or gms/derive_level_paths_test.py for examples. - # Replace this stub example with your own. - # Try to use the same order to make it easier. - # Remember, if the method accepts **params, then you can send that in here as well. - # ie: my_py_class.my_method(** params) - - src.subset_vector_layers( - hucCode=params["hucCode"], - nwm_streams_filename=params["nwm_streams"], - etc=params["a_number"], - etc2=params["a_list"], - ) - - # The assert statement is what we are acutally testing, one that evaluates as True passes, - # and one that evaluates to False fails. - # A message (string) can be added after the assert statement to provide detail on - # the case being tested, and why it failed. 
- - assert os.path.exists(params["nwm_streams"]) is True, "The nwm_streams file does not exist" - - # EXAMPLE OF A SUCCESSFUL TEST CASE WHICH CAPTURES AN EXCEPTION (FAILURE) - def test_subset_vector_layers_fail_invalid_stream_path(self): - """ - Notes about what the test is and the expected results (or expected exception if applicable) - """ - - params = self.params[ - # update "valid_data" value if you need to (aka.. more than one node) - "valid_data" - ].copy() - - params["nwm_streams"] = "/some/bad/path/" - - with pytest.raises(Exception): - src.subset_vector_layers( - hucCode=params["hucCode"], - nwm_streams_filename=params["nwm_streams"], - etc=params["a_number"], - etc2=params["a_list"], - ) diff --git a/unit_tests/aggregate_branch_lists_params.json b/unit_tests/aggregate_branch_lists_params.json deleted file mode 100644 index d1090612c..000000000 --- a/unit_tests/aggregate_branch_lists_params.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "valid_data": { - "output_dir": "/data/outputs/unit_test_data/", - "file_name": "branch_ids.csv", - "output_file_name": "/data/outputs/unit_test_data/test.csv" - } -} diff --git a/unit_tests/aggregate_branch_lists_test.py b/unit_tests/aggregate_branch_lists_test.py deleted file mode 100644 index 93c4f29d8..000000000 --- a/unit_tests/aggregate_branch_lists_test.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python3 - -import json -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import aggregate_branch_lists as src - - -class test_aggregate_branch_lists(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_aggregate_branch_lists_success(self): - # global params_file - - params = self.params["valid_data"].copy() - - src.aggregate_branch_lists( - output_dir=params["output_dir"], - file_name=params["file_name"], - output_file_name=params["output_file_name"], - ) diff --git a/unit_tests/check_unit_errors_params.json b/unit_tests/check_unit_errors_params.json deleted file mode 100644 index 33a144fe6..000000000 --- a/unit_tests/check_unit_errors_params.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "valid_data": { - "fim_dir": "/data/outputs/unit_test_data" - }, - "invalid_path": { - "fim_dir": "/data/outputs/check_errors_example_unit_tests_not_not_valid" - } -} diff --git a/unit_tests/check_unit_errors_test.py b/unit_tests/check_unit_errors_test.py deleted file mode 100644 index 2eba1e9d0..000000000 --- a/unit_tests/check_unit_errors_test.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python3 - -import json -import math -import os -import shutil -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import check_unit_errors as src -from utils.shared_variables import UNIT_ERRORS_MIN_NUMBER_THRESHOLD, UNIT_ERRORS_MIN_PERCENT_THRESHOLD - - -class test_check_unit_errors(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - # get_params_filename function in ./unit_test_utils handles errors - params_file_path = ut_helpers.get_params_filename(__file__) - - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - """ - To make most of this unit test, we have to do the following: - - rename the 
current unit_errors directory (if it exists) - - empty the unit_errors directory - - create a bunch of dummy files in it - - perform the unit test - - delete the unit_errors directory - - rename the original unit_test folder back to unit_tests - """ - - # Test Cases: - - def test_check_unit_errors_success_below_min_errors(self): - # Expecting no errors. - # Test to ensure the number of dummy files is less than the overall min number of error files. - - params = self.params["valid_data"].copy() - - num_dummy_files_reqd = UNIT_ERRORS_MIN_NUMBER_THRESHOLD - 1 - - self.__create_temp_unit_errors_folder_files(params["fim_dir"], num_dummy_files_reqd) - - expected_output = 0 - actual_output = src.check_unit_errors(params["fim_dir"], num_dummy_files_reqd) - - err_msg = "Number of dummy files IS NOT less than the overall min number of error files." - assert expected_output == actual_output, err_msg - - self.__remove_temp_unit_errors_folder(params["fim_dir"]) - - def test_check_unit_errors_fail_above_min_errors(self): - # Test to ensure the number of dummy files is more than the overall min number of error files. - # Expecting sys.exit of 62 - # We do expect this to fail and if it fails, it is successful. - # Here we expect an exception, and are capturing it using pytest.raises(Exception) - - params = self.params["valid_data"].copy() - - num_dummy_files_reqd = UNIT_ERRORS_MIN_NUMBER_THRESHOLD + 1 - - self.__create_temp_unit_errors_folder_files(params["fim_dir"], num_dummy_files_reqd) - - with pytest.raises(Exception): - src.check_unit_errors(params["fim_dir"], num_dummy_files_reqd) - - # We have to put the unit_errors folders back to the way it was - self.__remove_temp_unit_errors_folder(params["fim_dir"]) - - def test_check_unit_errors_success_above_percent_errors(self): - # Expecting no errors. - # Test to ensure the number of dummy files is more than the overall min number of error files. - # We do expect this not to to fail as it is greater than 10 errors but below the percent threshhold. - - params = self.params["valid_data"].copy() - - num_dummy_files_reqd = UNIT_ERRORS_MIN_NUMBER_THRESHOLD * 2 - - self.__create_temp_unit_errors_folder_files(params["fim_dir"], num_dummy_files_reqd) - - num_total_units = math.trunc(num_dummy_files_reqd * (100 / UNIT_ERRORS_MIN_PERCENT_THRESHOLD)) + 1 - expected_output = 0 - actual_output = src.check_unit_errors(params["fim_dir"], num_total_units) - - err_msg = "Number of dummy files IS NOT more than the overall min number of error files" - assert expected_output == actual_output, err_msg - - # We have to put the unit_errors folders back to the way is was. - self.__remove_temp_unit_errors_folder(params["fim_dir"]) - - def test_check_unit_errors_fail_below_percent_errors(self): - # Expecting sys.exit of 62 - # Test to ensure the number of dummy files is more than the overall min number of error files. 
- - # We do expect this to fail as it is greater than 10 errors - # AND below the percent threshhold (more percent errors than the threshold) - # Here we expect an exception, and are capturing it using pytest.raises(Exception) - - params = self.params["valid_data"].copy() - - num_dummy_files_reqd = UNIT_ERRORS_MIN_NUMBER_THRESHOLD * 2 - - self.__create_temp_unit_errors_folder_files(params["fim_dir"], num_dummy_files_reqd) - - num_total_units = math.trunc(num_dummy_files_reqd * (100 / UNIT_ERRORS_MIN_PERCENT_THRESHOLD)) - 10 - - with pytest.raises(Exception): - src.check_unit_errors(params["fim_dir"], num_total_units) - - # We have to put the unit_errors folders back to the way it was. - self.__remove_temp_unit_errors_folder(params["fim_dir"]) - - # Helper functions: - - def __create_temp_unit_errors_folder_files(self, output_folder, number_of_files): - """ - Process: - We want to preserve the original unit_errors folder if it exists, - so we wil rename it. - Then we will make a new unit_errors folder and fill it with a bunch of - dummy files. - A dummy file for non_zero_exit_codes.log will also be created. - Input: - output_folder: the root output folder (ie. /outputs/gms_example_unit_tests/) - number_of_files: how many dummy files to create - Returns: - True if the 'unit_errors' folder did original exist and needs to be renamed back. - False if the 'unit_errors' folder never existed in the first place. - """ - - ue_folder_preexists = False - - if not os.path.isdir(output_folder): - raise Exception(f"unit test root folder of {output_folder} does not exist") - - ue_folder = os.path.join(output_folder, "unit_errors") - temp_ue_folder = ue_folder + "_temp" - if os.path.isdir(ue_folder): - ue_folder_preexists = True - os.rename(ue_folder, temp_ue_folder) - - os.mkdir(ue_folder) - - for i in range(0, number_of_files): - file_name = "sample_" + str(i) + ".txt" - file_path = os.path.join(ue_folder, file_name) - with open(file_path, "w") as fp: - fp.close() - pass - - return ue_folder_preexists - - def __remove_temp_unit_errors_folder(self, output_folder): - """ - Process: - We want to preserve the original unit_errors folder if it exists, - so we will delete our temp unit test version of 'unit_errors', and rename - the original back to 'unit_errors' - Note.. it is possible the temp folder does not exist, - but we don't need to error out on it. Sometimes we got here by a try/catch cleanup - Input: - output_folder: the root output folder (ie. /data/outputs/unit_test_data/) - Returns: - nothing - """ - - ue_folder = os.path.join(output_folder, "unit_errors") - if os.path.isdir(ue_folder): - shutil.rmtree(ue_folder) - - temp_ue_folder = ue_folder + "_temp" - if os.path.isdir(temp_ue_folder): - os.rename(temp_ue_folder, ue_folder) - - # Test Cases for Helper funcitons: - - def test_create_temp_unit_errors_folder_files(self): - # Here we are testing our helper function to see if it raise exceptions appropriately with a bad path. - # In this case, we want the exception to be raised if there is an invalid path. - - params = self.params["invalid_path"] - invalid_folder = params["fim_dir"] - - with pytest.raises(Exception): - self.__create_temp_unit_errors_folder_files(invalid_folder, 4) - - def test_remove_temp_unit_errors_folder(self): - # Test of out helper function to see if the temp folder was removed. 
- - params = self.params["valid_data"].copy() - - self.__create_temp_unit_errors_folder_files(params["fim_dir"], 2) - - self.__remove_temp_unit_errors_folder(params["fim_dir"]) - - temp_folder_created = os.path.join(params["fim_dir"], "unit_errors") + "_temp" - - assert os.path.exists(temp_folder_created) is False diff --git a/unit_tests/clip_vectors_to_wbd_params.json b/unit_tests/clip_vectors_to_wbd_params.json deleted file mode 100644 index a6e5ea0d6..000000000 --- a/unit_tests/clip_vectors_to_wbd_params.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "valid_data": { - "subset_nwm_lakes": "/data/outputs/unit_test_data/05030104/nwm_lakes_proj_subset.gpkg", - "subset_nwm_streams": "/data/outputs/unit_test_data/05030104/nwm_subset_streams.gpkg", - "hucCode": "05030104", - "subset_nwm_headwaters": "/data/outputs/unit_test_data/05030104/nhd_headwater_points_subset.gpkg", - "wbd_buffer_filename": "/data/outputs/unit_test_data/05030104/wbd_buffered.gpkg", - "wbd_streams_buffer_filename": "/data/outputs/unit_test_data/05030104/wbd_buffered_streams.gpkg", - "wbd_filename": "/data/outputs/unit_test_data/05030104/wbd.gpkg", - "dem_filename": "/data/inputs/3dep_dems/10m_5070/fim_seamless_3dep_dem_10m_5070.vrt", - "dem_domain": "/data/inputs/3dep_dems/10m_5070/HUC6_dem_domain.gpkg", - "nwm_lakes": "/data/inputs/nwm_hydrofabric/nwm_lakes.gpkg", - "nwm_catchments": "/data/inputs/nwm_hydrofabric/nwm_catchments.gpkg", - "subset_nwm_catchments": "/data/outputs/unit_test_data/05030104/nwm_catchments_proj_subset.gpkg", - "nld_lines": "/data/inputs/nld_vectors/huc2_levee_lines/nld_preprocessed_05.gpkg", - "nld_lines_preprocessed": "/data/inputs/nld_vectors/3d_nld_preprocessed_230314.gpkg", - "landsea": "/data/inputs/landsea/water_polygons_us.gpkg", - "nwm_streams": "/data/inputs/nwm_hydrofabric/nwm_flows.gpkg", - "subset_landsea": "/data/outputs/unit_test_data/05030104/LandSea_subset.gpkg", - "nwm_headwaters": "/data/inputs/nwm_hydrofabric/nwm_headwaters.gpkg", - "subset_nld_lines": "/data/outputs/unit_test_data/05030104/nld_subset_levees.gpkg", - "subset_nld_lines_preprocessed": "/data/outputs/unit_test_data/05030104/3d_nld_subset_levees_burned.gpkg", - "wbd_buffer_distance": 5000, - "levee_protected_areas": "/data/inputs/nld_vectors/Levee_protected_areas.gpkg", - "subset_levee_protected_areas": "/data/outputs/unit_test_data/05030104/LeveeProtectedAreas_subset.gpkg" - } -} diff --git a/unit_tests/clip_vectors_to_wbd_test.py b/unit_tests/clip_vectors_to_wbd_test.py deleted file mode 100644 index b9567a3d3..000000000 --- a/unit_tests/clip_vectors_to_wbd_test.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import data.wbd.clip_vectors_to_wbd as src - - -class test_clip_vectors_to_wbd(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - # TODO New Test Case to check that files were being created on the file system - @pytest.mark.skip( - reason="pre clipped vector files are now generated using generate_pre_clip_fim_huc8.py and " - "sent to the /inputs directory. Testing the clip_vectors_to_wbd.py is no longer necessary, " - "as this script is not run during fim_pipeline.sh execution." 
- ) - def test_subset_vector_layers_success(self): - """ - This NEEDS be upgraded to check the output, as well as the fact that all of the output files exist. - Most of the output test and internal tests with this function will test a wide variety of conditions. - Only the basic return output value should be tested to ensure it is as expected. - For now, we are adding the very basic "happy path" test. - """ - - params = self.params["valid_data"].copy() - - # There is no default return value. - # For now we are happy if no exceptions are thrown. - try: - src.subset_vector_layers( - subset_nwm_lakes=params["subset_nwm_lakes"], - subset_nwm_streams=params["subset_nwm_streams"], - hucCode=params["hucCode"], - subset_nwm_headwaters=params["subset_nwm_headwaters"], - wbd_buffer_filename=params["wbd_buffer_filename"], - wbd_streams_buffer_filename=params["wbd_streams_buffer_filename"], - wbd_filename=params["wbd_filename"], - dem_filename=params["dem_filename"], - dem_domain=params["dem_domain"], - nwm_lakes=params["nwm_lakes"], - nwm_catchments=params["nwm_catchments"], - subset_nwm_catchments=params["subset_nwm_catchments"], - nld_lines=params["nld_lines"], - nld_lines_preprocessed=params["nld_lines_preprocessed"], - landsea=params["landsea"], - nwm_streams=params["nwm_streams"], - subset_landsea=params["subset_landsea"], - nwm_headwaters=params["nwm_headwaters"], - subset_nld_lines=params["subset_nld_lines"], - subset_nld_lines_preprocessed=params["subset_nld_lines_preprocessed"], - wbd_buffer_distance=params["wbd_buffer_distance"], - levee_protected_areas=params["levee_protected_areas"], - subset_levee_protected_areas=params["subset_levee_protected_areas"], - ) - - except (RuntimeError, TypeError, NameError) as e_info: - pytest.fail("Error in subset_vector_layers function", e_info) diff --git a/unit_tests/derive_level_paths_params.json b/unit_tests/derive_level_paths_params.json deleted file mode 100644 index df64eee5b..000000000 --- a/unit_tests/derive_level_paths_params.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "valid_data": { - "outputDestDir": "/data/outputs/unit_test_data/", - "in_stream_network": "/data/outputs/unit_test_data/05030104/nwm_subset_streams.gpkg", - "wbd": "/data/outputs/unit_test_data/05030104/wbd.gpkg", - "buffer_wbd_streams": "/data/outputs/unit_test_data/05030104/wbd_buffered_streams.gpkg", - "out_stream_network": "/data/outputs/unit_test_data/05030104/nwm_subset_streams_levelPaths.gpkg", - "branch_id_attribute": "levpa_id", - "out_stream_network_dissolved": "/data/outputs/unit_test_data/05030104/nwm_subset_streams_levelPaths.gpkg", - "headwaters_outfile": "/data/outputs/unit_test_data/05030104/nwm_headwaters.gpkg", - "catchments": "/data/outputs/unit_test_data/05030104/nwm_catchments_proj_subset.gpkg", - "catchments_outfile": "/data/outputs/unit_test_data/05030104/nwm_catchments_proj_subset_levelPaths.gpkg", - "branch_inlets_outfile": "/data/outputs/unit_test_data/05030104/nwm_subset_streams_levelPaths_dissolved_headwaters.gpkg", - "reach_id_attribute": "ID", - "verbose": true - } -} diff --git a/unit_tests/derive_level_paths_test.py b/unit_tests/derive_level_paths_test.py deleted file mode 100644 index 6ee67973d..000000000 --- a/unit_tests/derive_level_paths_test.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python3 - -import inspect -import json -import os -import sys -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -from derive_level_paths import Derive_level_paths -from stream_branches import StreamNetwork - 
- -class test_Derive_level_paths(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_Derive_level_paths_success_all_params(self): - """ - This test includes all params with many optional parms being set to the default value of the function - Notes: - Other params such as toNode_attribute and fromNode_attribute are defaulted and not passed into - __main__ , so we skip them here. - Returns GeoDataframe (the nwm_subset_streams_levelPaths_dissolved.gpkg) - """ - - params = self.params["valid_data"].copy() - - actual_df = Derive_level_paths( - in_stream_network=params["in_stream_network"], - buffer_wbd_streams=params["buffer_wbd_streams"], - wbd=params["wbd"], - out_stream_network=params["out_stream_network"], - branch_id_attribute=params["branch_id_attribute"], - out_stream_network_dissolved=params["out_stream_network_dissolved"], - headwaters_outfile=params["headwaters_outfile"], - catchments=params["catchments"], - catchments_outfile=params["catchments_outfile"], - branch_inlets_outfile=params["branch_inlets_outfile"], - reach_id_attribute=params["reach_id_attribute"], - verbose=params["verbose"], - ) - - # test data type being return is as expected. Downstream code might to know the type. - assert isinstance(actual_df, StreamNetwork) - - # **** NOTE: Based on 05030104 - # Test row count for dissolved level path GeoDataframe which is returned. - actual_row_count = len(actual_df) - expected_row_count = 4 - assert actual_row_count == expected_row_count - - # Test that output files exist as expected - assert ( - os.path.exists(params["out_stream_network"]) is True - ), f"Expected file {params['out_stream_network']} but it does not exist." - assert ( - os.path.exists(params["out_stream_network_dissolved"]) is True - ), f"Expected file {params['out_stream_network_dissolved']} but it does not exist." - assert ( - os.path.exists(params["headwaters_outfile"]) is True - ), f"Expected file {params['headwaters_outfile']} but it does not exist." - assert ( - os.path.exists(params["catchments_outfile"]) is True - ), f"Expected file {params['catchments_outfile']} but it does not exist." - assert ( - os.path.exists(params["catchments_outfile"]) is True - ), f"Expected file {params['catchments_outfile']} but it does not exist." - assert ( - os.path.exists(params["branch_inlets_outfile"]) is True - ), f"Expected file {params['branch_inlets_outfile']} but it does not exist." - - # Invalid Input stream for demo purposes. - def test_Derive_level_paths_invalid_input_stream_network(self): - # NOTE: As we are expecting an exception, we use pytest.raises(Exception). 
- - params = self.params["valid_data"].copy() - params["in_stream_network"] = "some bad path" - - with pytest.raises(Exception): - Derive_level_paths( - in_stream_network=ut_helpers.json_concat(params, "outputDestDir", "in_stream_network"), - out_stream_network=params["out_stream_network"], - branch_id_attribute=params["branch_id_attribute"], - out_stream_network_dissolved=params["out_stream_network_dissolved"], - huc_id=params["huc_id"], - headwaters_outfile=params["headwaters_outfile"], - catchments=params["catchments"], - catchments_outfile=params["catchments_outfile"], - branch_inlets_outfile=params["branch_inlets_outfile"], - reach_id_attribute=params["reach_id_attribute"], - verbose=params["verbose"], - ) diff --git a/unit_tests/filter_catchments_and_add_attributes_params.json b/unit_tests/filter_catchments_and_add_attributes_params.json deleted file mode 100644 index a68a46fd7..000000000 --- a/unit_tests/filter_catchments_and_add_attributes_params.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "valid_data": { - "outputDestDir": "/outputs/unit_test_data", - "input_catchments_filename": "/data/outputs/unit_test_data/02020005/branches/2274000033/gw_catchments_reaches_2274000033.gpkg", - "input_flows_filename": "/data/outputs/unit_test_data/02020005/branches/2274000033/demDerived_reaches_split_2274000033.gpkg", - "output_catchments_filename": "/data/outputs/unit_test_data/02020005/branches/2274000033/gw_catchments_reaches_filtered_addedAttributes_2274000033.gpkg", - "output_flows_filename": "/data/outputs/unit_test_data/02020005/branches/2274000033/demDerived_reaches_split_filtered_2274000033.gpkg", - "wbd_filename": "/data/outputs/unit_test_data/02020005/wbd8_clp.gpkg", - "huc_code": "02020005" - } -} diff --git a/unit_tests/filter_catchments_and_add_attributes_test.py b/unit_tests/filter_catchments_and_add_attributes_test.py deleted file mode 100644 index 098d6935f..000000000 --- a/unit_tests/filter_catchments_and_add_attributes_test.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 - -import inspect -import json -import os -import sys -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import filter_catchments_and_add_attributes as src - - -# ************* -# Important: For this to work, when you run gms_run_branch.sh, you have to -# use deny_gms_branches_dev.lst or the word "none" for the deny list arguments -# (unit and branch deny list parameters). Key files need to exist for this unit test to work. -class test_filter_catchments_and_add_attributes(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_filter_catchments_and_add_attributes_success(self): - """ - The gw_catchments_reaches_filtered_addedAttributes_.gpkg and - demDerived_reaches_split_filtered_.gpkg should not exit prior to this test. - If the test is successful, these file will be created. - """ - - params = self.params["valid_data"].copy() - - # To setup the test, lets start by deleted the two expected output files to ensure - # that they are regenerated. 
- if os.path.exists(params["output_flows_filename"]): - os.remove(params["output_flows_filename"]) - if os.path.exists(params["output_catchments_filename"]): - os.remove(params["output_catchments_filename"]) - - # Test that the files were deleted - assert os.path.exists(params["output_flows_filename"]) is False - - assert os.path.exists(params["output_catchments_filename"]) is False - - src.filter_catchments_and_add_attributes( - input_catchments_filename=params["input_catchments_filename"], - input_flows_filename=params["input_flows_filename"], - output_catchments_filename=params["output_catchments_filename"], - output_flows_filename=params["output_flows_filename"], - wbd_filename=params["wbd_filename"], - huc_code=params["huc_code"], - ) - - # Test that the files were created by filer_catchments_and_add_attributes - assert os.path.exists(params["output_flows_filename"]) is True - - assert os.path.exists(params["output_catchments_filename"]) is True diff --git a/unit_tests/generate_branch_list_csv_params.json b/unit_tests/generate_branch_list_csv_params.json deleted file mode 100644 index 0e2f2574a..000000000 --- a/unit_tests/generate_branch_list_csv_params.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "valid_data_add_branch_zero": { - "huc_id": "05030104", - "branch_id": "0", - "output_branch_csv": "/data/outputs/unit_test_data/05030104/branch_ids.csv" - }, - "valid_data_add_branch": { - "huc_id": "05030104", - "branch_id": "1946000003", - "output_branch_csv": "/data/outputs/unit_test_data/05030104/branch_ids.csv" - }, - "invalid_bad_file_extension": { - "huc_id": "05030104", - "branch_id": "1946000003", - "output_branch_csv": "/data/outputs/unit_test_data/05030104/branch_ids2" - } -} diff --git a/unit_tests/generate_branch_list_csv_test.py b/unit_tests/generate_branch_list_csv_test.py deleted file mode 100644 index 5495767b6..000000000 --- a/unit_tests/generate_branch_list_csv_test.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import generate_branch_list_csv as src - - -class test_generate_branch_list_csv(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # for these tests to work, we have to check if the .csv exists and remove it - # prior to exections of the tests. - - params = self.params["valid_data_add_branch_zero"].copy() - if os.path.exists(params["output_branch_csv"]): - os.remove(params["output_branch_csv"]) - - # Test Cases: - - def test_generate_branch_list_csv_valid_data_add_branch_zero_success(self): - # yes.. 
we know that we can not control the order - - # global params_file - params = self.params["valid_data_add_branch_zero"].copy() - - src.generate_branch_list_csv( - huc_id=params["huc_id"], - branch_id=params["branch_id"], - output_branch_csv=params["output_branch_csv"], - ) - - def test_generate_branch_list_csv_valid_data_add_branch_success(self): - # global params_file - params = self.params["valid_data_add_branch"].copy() - - src.generate_branch_list_csv( - huc_id=params["huc_id"], - branch_id=params["branch_id"], - output_branch_csv=params["output_branch_csv"], - ) - - def test_generate_branch_list_csv_invalid_bad_file_extension(self): - # global params_file - params = self.params["invalid_bad_file_extension"].copy() - - # We expect this to fail. If it does fail with an exception, then this test is sucessful. - with pytest.raises(Exception): - src.generate_branch_list_csv( - huc_id=params["huc_id"], - branch_id=params["branch_id"], - output_branch_csv=params["output_branch_csv"], - ) diff --git a/unit_tests/generate_branch_list_params.json b/unit_tests/generate_branch_list_params.json deleted file mode 100644 index 88962a708..000000000 --- a/unit_tests/generate_branch_list_params.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "valid_data": { - "stream_network_dissolved": "/data/outputs/unit_test_data/05030104/nwm_subset_streams_levelPaths_dissolved.gpkg", - "branch_id_attribute": "levpa_id", - "output_branch_list_file": "/data/outputs/unit_test_data/05030104/branch_ids.lst" - } -} diff --git a/unit_tests/generate_branch_list_test.py b/unit_tests/generate_branch_list_test.py deleted file mode 100644 index 3a35f35ea..000000000 --- a/unit_tests/generate_branch_list_test.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import generate_branch_list as src - - -class test_Generate_branch_list(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_Generate_branch_list_success(self): - params = self.params["valid_data"].copy() - - src.generate_branch_list( - stream_network_dissolved=params["stream_network_dissolved"], - branch_id_attribute=params["branch_id_attribute"], - output_branch_list_file=params["output_branch_list_file"], - ) diff --git a/unit_tests/inundate_gms_params.json b/unit_tests/inundate_gms_params.json deleted file mode 100644 index 72af7da14..000000000 --- a/unit_tests/inundate_gms_params.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "valid_data_inudation_raster_single_huc": { - "hydrofabric_dir": "/data/outputs/unit_test_data/", - "forecast": "/data/inundation_review/inundation_nwm_recurr/nwm_recurr_flow_data/nwm21_17C_recurr_2_0_cms.csv", - "num_workers": 4, - "hucs": "02020005", - "inundation_raster": "/data/outputs/unit_test_data/inundation_extent_02020005.tif", - "inundation_polygon": null, - "depths_raster": null, - "verbose": true, - "log_file": "/data/outputs/unit_test_data/logs/inundation_logfile.txt", - "output_fileNames": "/data/outputs/unit_test_data/logs/inundation_file_list.csv" - } -} diff --git a/unit_tests/inundate_gms_test.py b/unit_tests/inundate_gms_test.py deleted file mode 100644 index 034830aa2..000000000 --- a/unit_tests/inundate_gms_test.py +++ /dev/null @@ -1,88 +0,0 @@ 
-#!/usr/bin/env python3 - -import json -import os -import sys -import unittest - -import inundate_gms as src -import pandas as pd -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - - -class test_inundate_gms(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - def test_Inundate_gms_create_inundation_raster_directory_single_huc_success(self): - """ - Test for creating a gms inundation rasters, not a depth raster and no - inundation_polygons. - - This test is essentially testing the Inundate_gms function, and the creation of a - the "output_fileNames" .csv file. - """ - - params = self.params["valid_data_inudation_raster_single_huc"].copy() - - # Clear previous outputs, if they exist. - for file in os.listdir(params["hydrofabric_dir"]): - if file.endswith(".tif"): - os.remove(os.path.join(params["hydrofabric_dir"], file)) - - if os.path.isfile(params["output_fileNames"]): - os.remove(params["output_fileNames"]) - - # Test the Inundate_gms function - output_fileNames_df = src.Inundate_gms( - hydrofabric_dir=params["hydrofabric_dir"], - forecast=params["forecast"], - num_workers=params["num_workers"], - hucs=params["hucs"], - inundation_raster=params["inundation_raster"], - inundation_polygon=params["inundation_polygon"], - depths_raster=params["depths_raster"], - verbose=params["verbose"], - log_file=None, - output_fileNames=params["output_fileNames"], - ) - - # Check if output files df has records. - assert len(output_fileNames_df) > 0, "Expected as least one dataframe record" - - def test_Inundate_gms_create_inundation_rasters(self): - """ - Test for creating a gms inundation rasters, not a depth raster and no - inundation_polygons. - This test is based on creating a raster based on a single huc and its branches - within the output folder. - """ - - params = self.params["valid_data_inudation_raster_single_huc"].copy() - - # Check all output rasters exist. 
- csv_out = pd.read_csv(params["output_fileNames"], skipinitialspace=True) - - csv_rasters = csv_out.inundation_rasters - - actual_rasters = [] - for file in os.listdir(params["hydrofabric_dir"]): - if file.endswith(".tif"): - actual_rasters.append(file) - - for csv_raster, actual_raster in zip(csv_rasters, actual_rasters): - assert os.path.exists( - os.path.join(params["hydrofabric_dir"], actual_raster) - ), f"Inundation Raster {csv_raster} does not exist" - - # Logging removed from tools/inundate_gms.py as of 8/1/23 - # assert os.path.exists(params["log_file"]), "Log file expected and does not exist" diff --git a/unit_tests/inundation_params.json b/unit_tests/inundation_params.json deleted file mode 100644 index b566789f8..000000000 --- a/unit_tests/inundation_params.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "valid_data_inundate_branch": { - "rem": "/data/outputs/unit_test_data/02020005/branches/3246000001/rem_zeroed_masked_3246000001.tif", - "catchments": "/data/outputs/unit_test_data/02020005/branches/3246000001/gw_catchments_reaches_filtered_addedAttributes_3246000001.tif", - "catchment_poly": "/data/outputs/unit_test_data/02020005/branches/3246000001/gw_catchments_reaches_filtered_addedAttributes_crosswalked_3246000001.gpkg", - "hydro_table": "/data/outputs/unit_test_data/02020005/branches/3246000001/hydroTable_3246000001.csv", - "forecast": "/data/test_cases/usgs_test_cases/validation_data_usgs/02020005/ptvn6/action/ahps_ptvn6_huc_02020005_flows_action.csv", - "mask_type": null, - "hucs": null, - "hucs_layerName": null, - "subset_hucs": null, - "num_workers": 1, - "aggregate": false, - "inundation_raster": "/data/test_cases/usgs_test_cases/02020005_usgs/testing_versions/unit_test_data/action/ptvn6_inundation_extent_02020005_3246000001.tif", - "inundation_polygon": null, - "depths": null, - "out_raster_profile": null, - "out_vector_profile": null, - "src_table": null, - "quiet": true, - "expected_inundation_raster": "/data/" - } -} diff --git a/unit_tests/inundation_test.py b/unit_tests/inundation_test.py deleted file mode 100644 index ef580c15a..000000000 --- a/unit_tests/inundation_test.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import sys -import unittest - -import inundation as src -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - - -class test_inundate(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - @pytest.mark.skip(reason="Inundate_gms will be rebuilt in the future, so this test will be skipped.") - def test_inundate_create_inundation_raster_single_branch_success(self): - """ - Test for creating a inundation branch raster, no depth raster and no - inundation_polygons, no subsets, no mask - """ - - params = self.params["valid_data_inundate_branch"].copy() - - # returns list of rasters and polys - in_rasters, depth_rasters, in_polys = src.inundate( - rem=params["rem"], - catchments=params["catchments"], - catchment_poly=params["catchment_poly"], - hydro_table=params["hydro_table"], - forecast=params["forecast"], - mask_type=params["mask_type"], - hucs=params["hucs"], - hucs_layerName=params["hucs_layerName"], - subset_hucs=params["subset_hucs"], - num_workers=params["num_workers"], - aggregate=params["aggregate"], - 
inundation_raster=params["inundation_raster"], - inundation_polygon=params["inundation_polygon"], - depths=params["depths"], - out_raster_profile=params["out_raster_profile"], - out_vector_profile=params["out_vector_profile"], - src_table=params["src_table"], - quiet=params["quiet"], - ) - - print("in_rasters") - print(in_rasters) - - assert len(in_rasters) == 1, "Expected exactly one inundation raster path records" - assert depth_rasters[0] is None, "Expected no depth raster path records" - assert in_polys[0] is None, "Expected no inundation_polys records" - - msg = f"Expected file {params['expected_inundation_raster']} but it does not exist." - assert os.path.exists(params["expected_inundation_raster"]) is True, msg diff --git a/unit_tests/outputs_cleanup_params.json b/unit_tests/outputs_cleanup_params.json deleted file mode 100644 index a3d70bca5..000000000 --- a/unit_tests/outputs_cleanup_params.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "valid_specific_branch_data": { - "src_dir": "/data/outputs/unit_test_data/02020005/branches/2274000018", - "deny_list": "/foss_fim/config/deny_branches.lst", - "branch_id": "2274000018", - "verbose": true - }, - "valid_directory_data": { - "src_dir": "/data/outputs/unit_test_data", - "deny_list": "/foss_fim/config/deny_branches.lst", - "branch_id": "0", - "verbose": true - }, - "skip_clean": { - "src_dir": "/data/outputs/unit_test_data", - "deny_list": "None", - "branch_id": "", - "verbose": true - } -} diff --git a/unit_tests/outputs_cleanup_test.py b/unit_tests/outputs_cleanup_test.py deleted file mode 100644 index 5d152864f..000000000 --- a/unit_tests/outputs_cleanup_test.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python3 - -import io -import json -import os -import sys -import unittest -from contextlib import redirect_stdout -from pathlib import Path - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import outputs_cleanup as src -from utils.shared_functions import FIM_Helpers as fh - - -class test_outputs_cleanup(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_remove_deny_list_files_specific_branch_success(self): - """ - This validates removal of files for a directory already pointing to a - specific branch in a HUC - """ - - params = self.params["valid_specific_branch_data"].copy() - - # Gather all of the file names into an array from the deny_list - deny_files = self.__get_deny_list_filenames( - params["src_dir"], params["deny_list"], params["branch_id"] - ) - - # Test whether we have a list of files to check - assert len(deny_files) > 0 - - src.remove_deny_list_files( - src_dir=params["src_dir"], - deny_list=params["deny_list"], - branch_id=params["branch_id"], - verbose=params["verbose"], - ) - - assert self.__check_no_deny_list_files_exist(params["src_dir"], deny_files) is True - - def test_remove_deny_list_files_huc_level_success(self): - """ - This validates removal of files for all files and subdirectory files. 
- Normally used for covering all hucs and their branch zeros but - can be anything - """ - - params = self.params["valid_directory_data"].copy() - - # Gather all of the file names into an array from the deny_list - deny_files = self.__get_deny_list_filenames( - params["src_dir"], params["deny_list"], params["branch_id"] - ) - - # Test whether we have a list of files to check - assert len(deny_files) > 0 - - src.remove_deny_list_files( - src_dir=params["src_dir"], - deny_list=params["deny_list"], - branch_id=params["branch_id"], - verbose=params["verbose"], - ) - - assert self.__check_no_deny_list_files_exist(params["src_dir"], deny_files) is True - - def test_remove_deny_list_skip_cleaning_success(self): - """ - This validates removal of files for all files and subdirectory files. - Normally used for covering all hucs and their branch zeros but - can be anything - """ - - params = self.params["skip_clean"].copy() - - deny_files = self.__get_deny_list_filenames( - params["src_dir"], params["deny_list"], params["branch_id"] - ) - - # Ensure we have a value of "None" for a deny_list value, - # __get_deny_list_filenames returns an empty array if "None" is provided - assert len(deny_files) == 0 - - # This is tricky, as we're capturing the stdout (return statement) from remove_deny_list_files, - # to verify the function is returning at the correct place, and not removing files - # when we do not provide a deny list file. We set f to the io stream, and redirect it using - # redirect_stdout. - f = io.StringIO() - with redirect_stdout(f): - src.remove_deny_list_files( - src_dir=params["src_dir"], - deny_list=params["deny_list"], - branch_id=params["branch_id"], - verbose=params["verbose"], - ) - - # Get the stdout value of remove_deny_list_files and set it to skip_clean_out - skip_clean_out = f.getvalue() - - # This string must match the print statement in /src/gms/outputs_cleanup.py, including the \n newline, - # which occurs "behind the scenes" with every call to print() in Python - assert skip_clean_out == "file clean via the deny list skipped\n" - - def test_remove_deny_list_files_invalid_src_directory(self): - """ - Double check the src directory exists - """ - - params = self.params["valid_specific_branch_data"].copy() - params["src_dir"] = "/data/does_no_exist" - - # We want an exception to be thrown here, if so, the test passes. - with pytest.raises(Exception): - src.remove_deny_list_files( - src_dir=params["src_dir"], - deny_list=params["deny_list"], - branch_id=params["branch_id"], - verbose=params["verbose"], - ) - - def test_remove_deny_list_files_invalid_deny_list_does_not_exist(self): - """ - Double check the deny list exists - """ - - params = self.params["valid_specific_branch_data"].copy() - params["deny_list"] = "invalid_file_name.txt" - - # We want an exception to be thrown here, if so, the test passes. 
- with pytest.raises(Exception): - src.remove_deny_list_files( - src_dir=params["src_dir"], - deny_list=params["deny_list"], - branch_id=params["branch_id"], - verbose=params["verbose"], - ) - - # Helper Functions: - - def __get_deny_list_filenames(self, src_dir, deny_list, branch_id): - deny_list_files = [] - - if deny_list == "None": - return deny_list_files - - # Note: some of the deny_file_names might be comment lines; - # the load_list_file call below also validates that the file exists - deny_file_names = fh.load_list_file(deny_list.strip()) - - for deny_file_name in deny_file_names: - # Only add files to the list that do not start with a # - deny_file_name = deny_file_name.strip() - if deny_file_name.startswith("#"): - continue - - deny_file_name = deny_file_name.replace("{}", branch_id) - - deny_list_files.append(deny_file_name) - - return deny_list_files - - def __check_no_deny_list_files_exist(self, src_dir, deny_array): - found_files = [] - - for file_name in deny_array: - found_files.append(os.path.join(src_dir, file_name)) - - for found_file in found_files: - if os.path.exists(found_file): - return False - - return True diff --git a/unit_tests/pyproject.toml b/unit_tests/pyproject.toml deleted file mode 100644 index 3ed480446..000000000 --- a/unit_tests/pyproject.toml +++ /dev/null @@ -1,8 +0,0 @@ -[tool.pytest.ini_options] -filterwarnings = [ - "error", - "ignore::UserWarning", - "ignore::DeprecationWarning", - "ignore::PendingDeprecationWarning", - "ignore::RuntimeWarning", -] diff --git a/unit_tests/rating_curve_comparison_params.json b/unit_tests/rating_curve_comparison_params.json deleted file mode 100644 index 89c254900..000000000 --- a/unit_tests/rating_curve_comparison_params.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "valid_data": { - "02020005": [ - "/data/outputs/unit_test_data/02020005/usgs_elev_table.csv", - "/data/outputs/unit_test_data/02020005/branches", - "/data/inputs/usgs_gages/usgs_rating_curves.csv", - "/data/outputs/unit_test_data/rating_curve_comparison/tables/usgs_interpolated_elevation_stats_02020005.csv", - "/data/outputs/unit_test_data/rating_curve_comparison/tables/nwm_recurrence_flow_elevations_02020005.csv", - "/data/outputs/unit_test_data/rating_curve_comparison/plots/FIM-USGS_rating_curve_comparison_02020005.png", - "/data/inundation_review/inundation_nwm_recurr/nwm_recurr_flow_data/", - "/data/inputs/usgs_gages/catfim_flows_cms.csv", - "02020005", - false, - false - ] - } -} diff --git a/unit_tests/rating_curve_comparison_test.py b/unit_tests/rating_curve_comparison_test.py deleted file mode 100644 index 89352cb26..000000000 --- a/unit_tests/rating_curve_comparison_test.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import unittest - -import pytest -from rating_curve_comparison import generate_rating_curve_metrics -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - - -class test_rating_curve_comparison(unittest.TestCase): - """ - Allows the params to be loaded once and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases - - @pytest.mark.skip( - reason="rating_curve has invalid pathing based on files " - "that are normally not in a runtime folder. This test will be disabled for now."
- ) - def test_generate_rating_curve_metrics_02020005_success(self): - """ - We are testing whether a .png file was created for a FIM-USGS rating curve comparison, - for HUC 02020005 using the `generate_rating_curve_metrics` function. - The element at index 5 for each HUC in `rating_curve_comparison_params.json` specifies - the FIM-USGS rating curve comparison .png filepath. - """ - - params = self.params[ - "valid_data" - ].copy() # update the "valid_data" value if needed (e.g. more than one node) - - _indiv_huc_params = params["02020005"] - - # To set up the test, let's start by deleting the expected output file to ensure - # that it is regenerated. - if os.path.exists(_indiv_huc_params[5]): - os.remove(_indiv_huc_params[5]) - - # Test that the file was deleted - assert os.path.exists(_indiv_huc_params[5]) is False - - generate_rating_curve_metrics(_indiv_huc_params) - - # Test that the file was created by generate_rating_curve_metrics - assert os.path.exists(_indiv_huc_params[5]) is True diff --git a/unit_tests/shared_functions_params.json b/unit_tests/shared_functions_params.json deleted file mode 100644 index aff840dd5..000000000 --- a/unit_tests/shared_functions_params.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "append_append_id_to_file_name_single_identifier_valid": { - "outputDestDir": "/output/unit_test_data/", - "file_name": "02020005/branches/3246000003/rem_zeroed_masked_3246000003.tif", - "identifier": "8888", - "expected_output": "02020005/branches/3246000003/rem_zeroed_masked_3246000003_8888.tif" - }, - "append_append_id_to_file_name_identifier_list_valid": { - "outputDestDir": "/output/unit_test_data/", - "file_name": "02020005/branches/3246000003/rem_zeroed_masked_3246000003.tif", - "identifier": [ - "7777", - "8888" - ], - "expected_output": "02020005/branches/3246000003/rem_zeroed_masked_3246000003_7777_8888.tif" - } -} diff --git a/unit_tests/shared_functions_test.py b/unit_tests/shared_functions_test.py deleted file mode 100644 index 62264ec75..000000000 --- a/unit_tests/shared_functions_test.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python3 - -import json -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -from utils.shared_functions import FIM_Helpers as src - - -class test_shared_functions(unittest.TestCase): - """ - Allows the params to be loaded once and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_append_id_to_file_name_single_identifier_success(self): - """ - Pass in a file name with the single identifier and get the single adjusted file name back - """ - - params = self.params["append_append_id_to_file_name_single_identifier_valid"].copy() - - actual_output = src.append_id_to_file_name( - file_name=ut_helpers.json_concat(params, "outputDestDir", "file_name"), - identifier=params["identifier"], - ) - - err_msg = "actual output does not match expected output" - - expected_output = ut_helpers.json_concat(params, "outputDestDir", "expected_output") - - assert expected_output == actual_output, err_msg - - def test_append_id_to_file_name_indentifer_list_success(self): - """ - Pass in a file name with the list of identifiers and - get a file name back with multiple identifiers added.
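- For example, identifier=["7777", "8888"] maps rem_zeroed_masked_3246000003.tif - to rem_zeroed_masked_3246000003_7777_8888.tif (see the params file above).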
- """ - - params = self.params["append_append_id_to_file_name_identifier_list_valid"].copy() - - actual_output = src.append_id_to_file_name( - file_name=ut_helpers.json_concat(params, "outputDestDir", "file_name"), - identifier=params["identifier"], - ) - - err_msg = "actual output does not match expected output" - - expected_output = ut_helpers.json_concat(params, "outputDestDir", "expected_output") - - assert expected_output == actual_output, err_msg - - def test_append_id_to_file_name_no_file_name_success(self): - """ - Pass in an non existant file name and get None back - """ - - params = self.params["append_append_id_to_file_name_single_identifier_valid"].copy() - - actual_output = src.append_id_to_file_name(None, identifier=params["identifier"]) - - error_msg = "actual output should not have a value" - - assert actual_output is None, error_msg diff --git a/unit_tests/split_flows_params.json b/unit_tests/split_flows_params.json deleted file mode 100644 index a4a17c248..000000000 --- a/unit_tests/split_flows_params.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "valid_data": { - "outputDestDir": "/data/outputs/unit_test_data/", - "max_length": 1500, - "slope_min": 0.001, - "lakes_buffer_input": 20, - "flows_filename": "/data/outputs/unit_test_data/02020005/branches/2274000031/demDerived_reaches_2274000031.shp", - "dem_filename": "/data/outputs/unit_test_data/02020005/branches/2274000031/dem_thalwegCond_2274000031.tif", - "split_flows_filename": "/data/outputs/unit_test_data/02020005/branches/2274000031/demDerived_reaches_split_2274000031.gpkg", - "split_points_filename": "/data/outputs/unit_test_data/02020005/branches/2274000031/demDerived_reaches_split_points_2274000031.gpkg", - "wbd8_clp_filename": "/data/outputs/unit_test_data/02020005/wbd8_clp.gpkg", - "lakes_filename": "/data/outputs/unit_test_data/02020005/nwm_lakes_proj_subset.gpkg", - "nwm_streams_filename": "/data/outputs/unit_test_data/02020005/nwm_subset_streams_levelPaths.gpkg" - } -} diff --git a/unit_tests/split_flows_test.py b/unit_tests/split_flows_test.py deleted file mode 100644 index a811fccfc..000000000 --- a/unit_tests/split_flows_test.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -import json -import os -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -import split_flows as src - - -class test_split_flows(unittest.TestCase): - """ - Allows the params to be loaded one and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - # Ensure split_flows_filename & split_points_filename are created by the split_flows function - def test_split_flows_success(self): - """ - The /data/outputs/unit_test_data//branches// - demDerived_reaches_split_.gpkg and - /data/outputs/unit_test_data//branches// - demDerived_reaches_split_points_.gpkg - should not exit prior to this test. If the test is successful, these files will be created. 
- """ - - params = self.params["valid_data"].copy() - - # to setup the test, lets start by deleted the two expected output files to ensure - # that they are regenerated - if os.path.exists(params["split_flows_filename"]): - os.remove(params["split_flows_filename"]) - if os.path.exists(params["split_points_filename"]): - os.remove(params["split_points_filename"]) - - error_msg = params["split_flows_filename"] + " does exist, when it should not (post os.remove call)" - assert os.path.exists(params["split_flows_filename"]) is False, error_msg - - error_msg = params["split_points_filename"] + " does exist, when it should not (post os.remove call)" - assert os.path.exists(params["split_points_filename"]) is False, error_msg - - src.split_flows( - max_length=params["max_length"], - slope_min=params["slope_min"], - lakes_buffer_input=params["lakes_buffer_input"], - flows_filename=params["flows_filename"], - dem_filename=params["dem_filename"], - split_flows_filename=params["split_flows_filename"], - split_points_filename=params["split_points_filename"], - wbd8_clp_filename=params["wbd8_clp_filename"], - lakes_filename=params["lakes_filename"], - nwm_streams_filename=params["nwm_streams_filename"], - ) - - error_msg = params["split_flows_filename"] + " does not exist" - assert os.path.exists(params["split_flows_filename"]) is True, error_msg - - error_msg = params["split_points_filename"] + " does not exist" - assert os.path.exists(params["split_points_filename"]) is True, error_msg diff --git a/unit_tests/unit_tests_utils.py b/unit_tests/unit_tests_utils.py deleted file mode 100644 index 847158b0d..000000000 --- a/unit_tests/unit_tests_utils.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python3 - -import errno -import os -import sys - - -class FIM_unit_test_helpers(object): - @staticmethod - def get_params_filename(unit_test_file_name): - unittest_file_name = os.path.basename(unit_test_file_name) - params_file_name = unittest_file_name.replace("_test.py", "_params.json") - params_file_path = os.path.join(os.path.dirname(unit_test_file_name), params_file_name) - - if not os.path.exists(params_file_path): - raise FileNotFoundError(f"{params_file_path} does not exist") - - return params_file_path - - @staticmethod - def json_concat(obj, keyone, keytwo): - concat_string = obj[keyone] + obj[keytwo] - - return concat_string diff --git a/unit_tests/usgs_gage_crosswalk_params.json b/unit_tests/usgs_gage_crosswalk_params.json deleted file mode 100644 index e65627bed..000000000 --- a/unit_tests/usgs_gage_crosswalk_params.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "valid_data": { - "usgs_gages_filename": "/data/outputs/unit_test_data/02020005/usgs_subset_gages.gpkg", - "input_flows_filename": "/data/outputs/unit_test_data/02020005/branches/2274000028/demDerived_reaches_split_filtered_2274000028.gpkg", - "input_catchment_filename": "/data/outputs/unit_test_data/02020005/branches/2274000028/gw_catchments_reaches_filtered_addedAttributes_2274000028.gpkg", - "dem_filename": "/data/outputs/unit_test_data/02020005/branches/2274000028/dem_meters_2274000028.tif", - "dem_adj_filename": "/data/outputs/unit_test_data/02020005/branches/2274000028/dem_thalwegCond_2274000028.tif", - "output_table_filename": "/data/outputs/unit_test_data/02020005/branches/2274000028/usgs_elev_table.csv", - "output_directory": "/data/outputs/unit_test_data/02020005/branches/2274000028", - "branch_id": "2274000028" - } -} diff --git a/unit_tests/usgs_gage_crosswalk_test.py b/unit_tests/usgs_gage_crosswalk_test.py deleted file mode 100644 
index 863c61590..000000000 --- a/unit_tests/usgs_gage_crosswalk_test.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python3 - -import json -import os -import sys -import unittest - -import pytest -from unit_tests_utils import FIM_unit_test_helpers as ut_helpers - -from usgs_gage_crosswalk import GageCrosswalk - - -class test_usgs_gage_crosswalk(unittest.TestCase): - """ - Allows the params to be loaded once and used for all test methods - """ - - @classmethod - def setUpClass(self): - params_file_path = ut_helpers.get_params_filename(__file__) - with open(params_file_path) as params_file: - self.params = json.load(params_file) - - # Test Cases: - - def test_GageCrosswalk_success(self): - """ - Test whether the GageCrosswalk object can be instantiated, and test that the run_crosswalk method can - successfully create the output table (usgs_elev_table.csv). - """ - - params = self.params[ - "valid_data" - ].copy() # update the "valid_data" value if needed (e.g. more than one node) - - # Delete the usgs_elev_table.csv if it exists - if os.path.exists(params["output_table_filename"]): - os.remove(params["output_table_filename"]) - - # Verify the usgs_elev_table.csv was deleted - msg = f'{params["output_table_filename"]} does exist, when it should have been deleted' - assert os.path.exists(params["output_table_filename"]) is False, msg - - # Instantiate and run GageCrosswalk - gage_crosswalk = GageCrosswalk(params["usgs_gages_filename"], params["branch_id"]) - - # Run crosswalk - gage_crosswalk.run_crosswalk( - params["input_catchment_filename"], - params["input_flows_filename"], - params["dem_filename"], - params["dem_adj_filename"], - params["output_directory"], - ) - - # Make sure that the usgs_elev_table.csv was written - msg = f'{params["output_table_filename"]} does not exist' - assert os.path.exists(params["output_table_filename"]) is True, msg