From 3466b21c071436acc33e8cdd1d477fe19fc8cbc3 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 09:38:54 -0400 Subject: [PATCH 01/34] move history triggers to the models package Signed-off-by: John DeAngelis --- .../ops/history.py => models/utils.py} | 25 +++++++++---------- backend/ops_api/ops/__init__.py | 6 ++--- 2 files changed, 15 insertions(+), 16 deletions(-) rename backend/{ops_api/ops/history.py => models/utils.py} (92%) diff --git a/backend/ops_api/ops/history.py b/backend/models/utils.py similarity index 92% rename from backend/ops_api/ops/history.py rename to backend/models/utils.py index edba2a050b..9c1b64cad2 100644 --- a/backend/ops_api/ops/history.py +++ b/backend/models/utils.py @@ -1,13 +1,11 @@ import json -import logging from collections import namedtuple from datetime import date, datetime from decimal import Decimal from enum import Enum from types import NoneType -from flask import current_app -from flask_jwt_extended import current_user +from loguru import logger from sqlalchemy import inspect from sqlalchemy.cyextension.collections import IdentitySet from sqlalchemy.orm import Session @@ -22,6 +20,7 @@ OpsDBHistory, OpsDBHistoryType, OpsEvent, + User, ) DbRecordAudit = namedtuple("DbRecordAudit", "row_key changes") @@ -113,13 +112,13 @@ def build_audit(obj, event_type: OpsDBHistoryType) -> DbRecordAudit: # noqa: C9 return DbRecordAudit(row_key, changes) -def track_db_history_before(session: Session): - session.add_all(add_obj_to_db_history(session.deleted, OpsDBHistoryType.DELETED)) - session.add_all(add_obj_to_db_history(session.dirty, OpsDBHistoryType.UPDATED)) +def track_db_history_before(session: Session, user: User | None): + session.add_all(add_obj_to_db_history(session.deleted, OpsDBHistoryType.DELETED, user)) + session.add_all(add_obj_to_db_history(session.dirty, OpsDBHistoryType.UPDATED, user)) -def track_db_history_after(session: Session): - session.add_all(add_obj_to_db_history(session.new, 
OpsDBHistoryType.NEW)) +def track_db_history_after(session: Session, user: User | None): + session.add_all(add_obj_to_db_history(session.new, OpsDBHistoryType.NEW, user)) def track_db_history_catch_errors(exception_context): @@ -138,20 +137,20 @@ def track_db_history_catch_errors(exception_context): "sqlalchemy_exception": f"{exception_context.sqlalchemy_exception}", }, ) - with Session(current_app.engine) as session: + with Session(exception_context.engine) as session: session.add(ops_db) session.commit() - current_app.logger.error(f"SQLAlchemy error added to {OpsDBHistory.__tablename__} with id {ops_db.id}") + logger.error(f"SQLAlchemy error added to {OpsDBHistory.__tablename__} with id {ops_db.id}") -def add_obj_to_db_history(objs: IdentitySet, event_type: OpsDBHistoryType): +def add_obj_to_db_history(objs: IdentitySet, event_type: OpsDBHistoryType, user: User | None): result = [] for obj in objs: if not isinstance(obj, (OpsEvent, OpsDBHistory, AgreementOpsDbHistory)): # not interested in tracking these db_audit = build_audit(obj, event_type) if event_type == OpsDBHistoryType.UPDATED and not db_audit.changes: - logging.info( + logger.info( f"No changes found for {obj.__class__.__name__} with row_key={db_audit.row_key}, " f"an OpsDBHistory record will not be created for this UPDATED event." 
) @@ -160,7 +159,7 @@ def add_obj_to_db_history(objs: IdentitySet, event_type: OpsDBHistoryType): ops_db = OpsDBHistory( event_type=event_type, event_details=obj.to_dict(), - created_by=current_user.id if current_user else None, + created_by=user.id if user else None, class_name=obj.__class__.__name__, row_key=db_audit.row_key, changes=db_audit.changes, diff --git a/backend/ops_api/ops/__init__.py b/backend/ops_api/ops/__init__.py index 810e374f46..f94a7eaf62 100644 --- a/backend/ops_api/ops/__init__.py +++ b/backend/ops_api/ops/__init__.py @@ -8,11 +8,11 @@ from sqlalchemy import event from sqlalchemy.orm import Session +from models.utils import track_db_history_after, track_db_history_before, track_db_history_catch_errors from ops_api.ops.auth.decorators import check_user_session_function from ops_api.ops.auth.extension_config import jwtMgr from ops_api.ops.db import handle_create_update_by_attrs, init_db from ops_api.ops.error_handlers import register_error_handlers -from ops_api.ops.history import track_db_history_after, track_db_history_before, track_db_history_catch_errors from ops_api.ops.home_page.views import home from ops_api.ops.urls import register_api from ops_api.ops.utils.core import is_fake_user, is_unit_test @@ -95,11 +95,11 @@ def shutdown_session(exception=None): @event.listens_for(db_session, "before_commit") def receive_before_commit(session: Session): - track_db_history_before(session) + track_db_history_before(session, current_user) @event.listens_for(db_session, "after_flush") def receive_after_flush(session: Session, flush_context): - track_db_history_after(session) + track_db_history_after(session, current_user) @event.listens_for(engine, "handle_error") def receive_error(exception_context): From 35f25fca4100b3b3f18463b43787e1104ef2b3ec Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 09:49:25 -0400 Subject: [PATCH 02/34] added loguru and sorted the dependencies Signed-off-by: John DeAngelis --- backend/ops_api/Pipfile 
| 43 +++--- backend/ops_api/Pipfile.lock | 256 +++++++++++++---------------------- 2 files changed, 115 insertions(+), 184 deletions(-) diff --git a/backend/ops_api/Pipfile b/backend/ops_api/Pipfile index 0cfd56630d..90f151d58f 100644 --- a/backend/ops_api/Pipfile +++ b/backend/ops_api/Pipfile @@ -4,41 +4,42 @@ verify_ssl = true name = "pypi" [packages] -sqlalchemy = "==2.0.36" -flask = "==3.0.3" -mypy = "==1.11.2" -flask-jwt-extended = {extras = ["asymmetric_crypto"], version = "==4.6.0"} +alembic = "==1.13.3" +alembic-postgresql-enum = "==1.3.0" authlib = "==1.3.2" -requests = "==2.32.3" -markupsafe = "==3.0.1" +azure-identity = "==1.17.1" +azure-storage-blob = "==12.22.0" +desert = "==2022.9.22" +flask = "==3.0.3" flask-cors = "==5.0.0" -psycopg2-binary = "==2.9.10" +flask-jwt-extended = {extras = ["asymmetric_crypto"], version = "==4.6.0"} gunicorn = "==22.0.0" -desert = "==2022.9.22" -marshmallow-enum = "==1.5.1" +loguru = "==0.7.2" +markupsafe = "==3.0.1" marshmallow-dataclass = "==8.7.1" -sqlalchemy-continuum = "==1.4.2" +marshmallow-enum = "==1.5.1" marshmallow-sqlalchemy = "==1.0.0" -azure-storage-blob = "==12.22.0" -azure-identity = "==1.17.1" -alembic = "==1.13.3" -alembic-postgresql-enum = "==1.3.0" +mypy = "==1.11.2" +psycopg2-binary = "==2.9.10" PyYAML = "==6.0.2" +requests = "==2.32.3" +sqlalchemy = "==2.0.36" +sqlalchemy-continuum = "==1.4.2" [dev-packages] +black = {extras = ["d"], version = "==24.8.0"} flake8 = "==7.1.1" +flake8-black = "==0.3.6" +ipython = "==8.26.0" isort = "==5.13.2" -pytest = "==8.3.3" nox = "==2024.4.15" -ipython = "==8.26.0" -pytest-flask = "==1.3.0" -black = {extras = ["d"], version = "==24.8.0"} -flake8-black = "==0.3.6" +numpy = "==2.0.1" +pytest = "==8.3.3" +pytest-bdd = "==7.2.0" pytest-cov = "==5.0.0" pytest-docker = {extras = ["docker-compose-v2"], version = "==3.1.1"} +pytest-flask = "==1.3.0" pytest-mock = "==3.14.0" -pytest-bdd = "==7.2.0" -numpy = "==2.0.1" [requires] python_version = "3.12" diff --git 
a/backend/ops_api/Pipfile.lock b/backend/ops_api/Pipfile.lock index ca0c579a15..ad0bab002b 100644 --- a/backend/ops_api/Pipfile.lock +++ b/backend/ops_api/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "d97384bd949b556f31cc91af6a1ebd6bdf8cd3948309e096a0e55b37906fe4a7" + "sha256": "793f7920360f5776da2c1fb5c80ad7b1f3bd60e77cd6dea3eddbf64804d486cc" }, "pipfile-spec": 6, "requires": { @@ -355,85 +355,6 @@ "markers": "python_version >= '3.7' and python_version < '4'", "version": "==4.6.0" }, - "greenlet": { - "hashes": [ - "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", - "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7", - "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", - "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", - "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", - "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563", - "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83", - "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", - "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", - "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa", - "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", - "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", - "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", - "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22", - "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9", - "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0", - "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba", - "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3", - 
"sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", - "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", - "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291", - "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", - "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", - "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", - "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", - "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef", - "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c", - "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", - "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c", - "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", - "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", - "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8", - "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d", - "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", - "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145", - "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80", - "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", - "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e", - "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", - "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1", - "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef", - "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc", - "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", - "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120", 
- "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437", - "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd", - "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981", - "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", - "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", - "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798", - "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7", - "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", - "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", - "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", - "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af", - "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", - "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", - "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42", - "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e", - "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81", - "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", - "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617", - "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc", - "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de", - "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111", - "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", - "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", - "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6", - "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", - 
"sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", - "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803", - "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", - "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f" - ], - "markers": "python_version < '3.13' and (platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))))", - "version": "==3.1.1" - }, "gunicorn": { "hashes": [ "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9", @@ -475,6 +396,15 @@ "markers": "python_version >= '3.7'", "version": "==3.1.4" }, + "loguru": { + "hashes": [ + "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb", + "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac" + ], + "index": "pypi", + "markers": "python_version >= '3.5'", + "version": "==0.7.2" + }, "mako": { "hashes": [ "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a", @@ -1994,91 +1924,91 @@ }, "yarl": { "hashes": [ - "sha256:04f930fcc940f96b8b29110c56882bcff8703f87a7b9354d3acf60ffded5a23d", - "sha256:05183fd49244517cb11c208d0ae128f2e8a85ddb7caf22ad8b0ffcdf5481fcb6", - "sha256:0ace3927502a9f90a868d62c66623703cf5096dcb586187266e9b964d8dd6c81", - "sha256:12c80ec2af97ff3e433699bcabc787ef34e7c08ec038a6e6a25fb81d7bb83607", - "sha256:14d6f07b7b4b3b8fba521904db58442281730b44318d6abb9908de79e2a4e4f4", - "sha256:14effa29db6113be065a594e13a0f45afb9c1e374fd22b4bc3a4eff0725184b2", - "sha256:19077525cd36c797cae19262e15f2881da33c602fb35d075ff0e4263b51b8b88", - "sha256:1c3e9ae98719fe180751b093d02dbcc33b78a37e861d0f2c9571720bd31555db", - "sha256:1c49fe426c45520b4b8a48544d3a9a58194f39c1b57d92451883f847c299a137", - "sha256:20f8bdaf667386cea1a8f49cb69a85f90346656d750d3c1278be1dbc76601065", - 
"sha256:2128315cdc517a45ceb72ec17b256a7940eeb4843c66834c203e7d6580c83405", - "sha256:2207491555af5dbbee4c3179a76766f7bc1ecff858f420ea96f2e105ca42c4dd", - "sha256:24cad94cf2f46cc8e4b9cd44e4e8a84483536a6c54554960b02b10b5724ab122", - "sha256:270fef2b335e60c91ee835c524445e2248af841c8b72f48769ed6c02fbff5873", - "sha256:2a5cbbb06559757f091f9e71d3f76c27d4dfe0652cc3f17ccce398b8377bfda4", - "sha256:2e61b72cf15922a7a665299a6b6825bd9901d67ec3b9d3cf9b256dc1667c9bb1", - "sha256:32e8ebf0080ddd38ec05f8be940a3719e5fe1ab8bb6d2b3f6f8b89c9e34149aa", - "sha256:3487c57bc8f17f2586ae7fd0e77f65cd298d45b64d15f604bbb29f4cce0e7961", - "sha256:353306ba6f0218af1aefe4b9c8b3a0b81b209bc75d79357dac6aca70a7b09d6a", - "sha256:370f646d3654e196ddbf772a2d737fe4e1dd738267015b73ff6267ca592fd9d6", - "sha256:380f30073cbd9b740891bb56f44ee31f870e8721269b618ccc9913400936d9f6", - "sha256:39533b927c665bcff7da80bf299218e4af12f3e2be27e9c456e29547bcefd631", - "sha256:3cf2b50352df8775591869aaa22c52b64d60376ba99c0802b42778fedc90b775", - "sha256:45c05b87a8494d9820ea1ac82118fd2f1d795d868e94766fe8ff670377bf6280", - "sha256:46653b5fd29e63ffe63335da343829a2b00bb43b0bd9bb21240d3b42629629e2", - "sha256:493760c4ced954582db83c4760166992c016e1777ebc0f3ef1bb5eb60b2b5924", - "sha256:5685ebc333c95b75be3a0a83a81b82b6411beee9585eaeb9e2e588ae8df23848", - "sha256:59b77f0682e1917be197fc8229530f0c6fb3ef8e242d8256ba091a3a1c0ef7e6", - "sha256:59dce412b2515de05ab2eb6aef19ad7f70857ad436cd65fc4276df007106fb42", - "sha256:5a63ed17af784da3de39b82adfd4f8404ad5ee2ec8f616b063f37da3e64e0521", - "sha256:5c4cc1a438ac52562427330e33891f50a78ffd38d335abc64f93f201c83bdc82", - "sha256:63ba82841ce315e4b5dc8b9345062638c74b1864d38172d0a0403e5a083b0950", - "sha256:66ddcd7ee3264bc937860f4780290d60f6472ca0484c214fe805116a831121e8", - "sha256:68b27a7d9fb0f145de608da2e45e37fd2397b00266f10487e557f769afa2842d", - "sha256:690d8f702945506b58c9c5834d586e8fd819b845fe6239ab16ebc64a92a6fd3d", - "sha256:69c2d111e67a818e702ba957da8c8e62de916f5c1b3da043f744084c63f12d46", 
- "sha256:6d1aba1f644d6e5e16edada31938c11b6c9c97e3bf065742a2c7740d38af0c19", - "sha256:6da6f6c6ee5595658f21bb9d1ecd702f7a7f22f224ac063dfb595624aec4a2e0", - "sha256:6f2911cae6dd012adaaf51494dad4cafb4284ad1f3b588df6ea3e3017e053750", - "sha256:7794aade99be0d48b69bd5942acddfeff0de3d09c724d9abe4f19736708ef18f", - "sha256:7ccb4667e0c0a25815efbfe251d24b56624449a319d4bb497074dd49444fb306", - "sha256:8df77742b403e71c5d62d22d150e6e35efd6096a15f2c7419815911c62225100", - "sha256:8f0b33fd088e93ba5f7f6dd55226630e7b78212752479c8fcc6abbd143b9c1ce", - "sha256:90257bc627897a2c1d562efcd6a6b18887e9dacae795cad2367e8e16df47d966", - "sha256:925e72fc7a4222a5bf6d288876d5afacc8f833b49c4cca85f65089131ba25afa", - "sha256:92f9a45230d3aa8568c1d692ab27bf505a32dfe3b404721458fc374f411e8bd2", - "sha256:956975a3a1ce1f4537be22278d6a283b8bc74d77671f7f6469ab1e800f4e9b02", - "sha256:a1d49ed6f4b812dde88e937d4c2bd3f13d72c23ef7de1e17a63b7cacef4b5691", - "sha256:a3a98d70c667c957c7cd0b153d4cb5e45d43f5e2e23de73be6f7b5c883c01f72", - "sha256:ac26e43b56dbafb30256906bc763cc1f22e05825ae1ced4c6afbd0e6584f18de", - "sha256:acdfe626607a245aedca35b211f9305a9e7a33349da525bf4ef3caaec8ef51cd", - "sha256:adeac55335669a189189373c93d131ebfc2de3ec04f0d3aa7dff6661f83b89b6", - "sha256:b55cc82ba92c07af6ba619dcf70cc89f7b9626adefb87d251f80f2e77419f1da", - "sha256:b5ab6c64921802176f56c36aa67c5e6a8baf9557ec1662cb41ecdb5580b67eb9", - "sha256:b6316af233610b9868eda92cf68c016750cbf50085ac6c51faa17905ddd25605", - "sha256:b765f19e23c29b68e4f8bbadd36f1da2333ba983d8da2d6518e5f0a7eb2579c2", - "sha256:bde319602111e9acca3c4f87f4205b38ba6166004bf108de47553633f9a580fc", - "sha256:c24debeec87908a864a2b4cb700f863db9441cabacdb22dc448c5d38b55c6f62", - "sha256:cb474a06023d01ead9c072b2580c22b2691aa1cabdcc19c3171ab1fa6d8496e3", - "sha256:cc4b999718287073dccd3acb0ef1593961bd7923af08991cb3c94080db503935", - "sha256:ce65ed7ad7b6cbca06b0c011b170bd2b0bc56b0a740540e2713e5ac12d7b9b2e", - 
"sha256:d0328f798052a33803a77d0868c7f802e952127092c1738fc9e7bfcaac7207c5", - "sha256:d2a70e8bec768be7423d8d465858a3646b34257a20cc02fd92612f1b14931f50", - "sha256:d798de0b50efb66583fc096bcdaa852ed6ea3485a4eb610d6a634f8010d932f4", - "sha256:d80c019083506886df098b7bb0d844e19db7e226736829ef49f892ed0a070fa5", - "sha256:d9cd73f7bff5079d87c2622aa418a75d5d3cdc944d3edb905c5dfc3235466eb0", - "sha256:db32a5c2912db45e73f80107d178e30f5c48cf596762b3c60ddfebdd655385f0", - "sha256:dbd4808a209b175b5ebbac24c4798dd7511c5ee522a16f2f0eac78c717dfcdfc", - "sha256:decf9d76191bfe34835f1abd3fa8ebe8a9cd7e16300a5c7e82b18c0812bb22a2", - "sha256:df494e5a79f2ef8f81f966f787e515760e639c6319a321c16198b379c256a157", - "sha256:df7784a29b9689341c17d06d826e3b52ee59d6b6916177e4db0477be7aad5f72", - "sha256:e337737b8c9d837e5b4d9e906cc57ed7a639e16e515c8094509b17f556fdb642", - "sha256:e924040582499f7514ec64691031504e6224b5ae7224216208fc2c94f8b13c89", - "sha256:eacd9de9b5b8262818a2e1f88efbd8d523abc8453de238c5d2f6a91fa85032dd", - "sha256:ef67989d480358482830dc3bc232709804f46a61e7e9841d3f0b1c13a4735b3b", - "sha256:efe758958a7bffce68d91ade238df72667e1f18966ed7b1d3d390eead51a8903", - "sha256:f5f0a0691e39c2e7b5c0f23e6765fa6cb162dce99d9ab1897fdd0f7a4a38b6fb", - "sha256:f785d83ece0998e4ce4fadda22fa6c1ecc40e10f41617013a8726d2e9af0d98f", - "sha256:f94d8adfdec402ff97cecc243b310c01d571362ca87bcf8def8e15cb3aaac3ee", - "sha256:f9b251d3f90e125ff0d1f76257329a9190fa1bfd2157344c875580bff6dedc62", - "sha256:fbcff47f8ba82467f203037f7a30decf5c724211b224682f7236edb0dcbb5b95", - "sha256:fe03cea925d884b8f1157a7037df2f5b6a6478a64b78ee600832d8a9f044c83e" + "sha256:07019a9de859c5a29916defd1e8c7557de6491a10bf50c49ff5284e6aedf5313", + "sha256:0c96eaa30030e1cfafe533f3da8983812281235b7c50ef2a6c78ceca7aea1a0b", + "sha256:0d0f16c87c62b7a94b389ddf6a8c9d081265d788875c39f3a80108c4856eea7b", + "sha256:0e07e4b17b648c880e8e42bf1ac0a730bde114961646ae1c2ec4433f0c11ca94", + "sha256:1005921b30f4f39bf893946df6173567ff650307babb5ec04bbf64342a1f62c1", 
+ "sha256:115346433fad2084ee3a1a925ccc0659990aa42e208ca54c278830a150a3caf3", + "sha256:11b207061f28b4b6d980239b22ab0ecfadc47846b5a3b8e79f27fcc019d02cf9", + "sha256:1ceb677fb583971351627eac70eec6763fbc889761828da7a276681b5e39742d", + "sha256:1e7468f31de61a82817f918743e5229fce774f73fad58487cdf88eef4f06d864", + "sha256:20acf84bd1ce530065f8e957e4a5878fda4bc5f18cb02659828210e1519de54e", + "sha256:21fabe58042f3e567b4edc75b2cf44cea02f228e41ac09d73de126bf685fe883", + "sha256:3198da7d7c34e29fc8c823e0c3ce6c7274aac35760de557c2017489c7d98fc5a", + "sha256:35a6b69cc44bda002705d6138346bf0a0234cbb7c26c3bf192513eb946aee6f9", + "sha256:3896bf15284dd23acab1f2e7fceb350d8da6f6f2436b922f7ec6b3de685d34ca", + "sha256:3a79c0a8bbb046add85663af85e9993b691bf20c2a109518bd35e0ce77edfe42", + "sha256:4076bfd8f1621449b19b9826848ed51bf0f2d1d38e82647c312c0730d8778903", + "sha256:40c18f96696549e73b92dc12619f07019cbf5faefc1612608f967c144816e493", + "sha256:46491b3e058de7b484e1c9fb20aa8441f06d6c9a18395d711c1c2a9ad6707d6a", + "sha256:48334a6c8afee93097eb17c0a094234dac2d88da076c8cf372e09e2a5dcc4b66", + "sha256:4851618679ca70b863ba2e7109be5f09f8fd7715ec505bd42e5a947dcfde3a45", + "sha256:49190eb2ece70313742b0ea51520340288a059674da1f39eefb589d598d9453e", + "sha256:49f886e8dcf591275c6e20915b516fd81647857566b0c0158c52df1e468849c9", + "sha256:4a706db0c3b7e4578ff34ed2b1d2507b08fd491346ffc64468786fdf1151d938", + "sha256:4ac85e760543129a1912a82438fc8075223e35eaa2d457d61cd83c27d00d17be", + "sha256:4f66a0eda48844508736e47ed476d8fdd7cdbf16a4053b5d439509a25f708504", + "sha256:4feab2dcb725eb5b4835207ecf3d370ff7ce930b253cba5e681646cb80d64c2c", + "sha256:527c68f48a91d953691291d3bce0209293aa5ad13ff05286ddb506791c331818", + "sha256:551205388d1da18a9975302c9a274ba24788f53bb9bb86187496ebf9e938916e", + "sha256:590e2d733a82ecf004c5c531cbef0d6be328e93adec960024eb213f10cb9503e", + "sha256:59db8e6888d5302b8dbca0c1026ddabe99d81d67cdc101941519e13ffc9050fe", + 
"sha256:60165b8bc260f453321004b193770a66cc1b1a5c57c07d4b8dcc96839e7ad578", + "sha256:65a0168691373e08d869d48b62c8bed0af0cdaef19c76e11ad73b43901bbdb5a", + "sha256:65e0467f90f2acf3bc83bbfeedece8f1fd84df8add1a54e9600ed7b7b5debdb0", + "sha256:691a3b498fdebef63308e8967bb598cfd326c56d628da82b799dd181bace4503", + "sha256:69f628d2da1489b27959f4d63fdb326781fe484944dce94abbf919e416c54abe", + "sha256:6e100c6c7d9e9d469009fd55cc4d7ad168d67d40758865c50da713f7ada491e5", + "sha256:6f8136bde8dfa4477c6a85c79a366581b4a505b51a52b669318fb631d3f4f638", + "sha256:76259901cf1ac3db65e7e6dff04775b626d0715f9b51d92b447351144c756a82", + "sha256:7694f109867ee428c21b85ae19fd31d164c691eb45cc95c561cfdeba237a12e3", + "sha256:77390496f2f32437a721c854897f889abefae0f3009daf90a2f703508d96c920", + "sha256:787532f00543a21b8f4ec3050b4e01b8fe437797903c0156a0b03dfca5e1ba6c", + "sha256:7d5226c70af3ad9569ccc4ccc04ab65be79eeb22c87d7ae789c89e62ef76bbd6", + "sha256:83e7154aa0d17f5c93d27ac01088fd9ab6673e7bab1acbd07cd7a865b980c045", + "sha256:84937d00e2ea03616c40977de20189fa13a9213e5744a3c6afa0e7dd9141d69c", + "sha256:8721f8bedaa722c3c483cc06a1399cbfdb280eadf443aa5d324b0203cef2a75f", + "sha256:8b569f4f511b59518ba6719feb5b8bf0a5d4115e6ac903c89e10a8a9ac656017", + "sha256:8de5328d91859b461899497980d4cc8269e84e2d18640f6ac643886fda9000bf", + "sha256:9060589d0acad1fca048861fa9ee3e8ed060f67894fa885969648ab6e9e99a54", + "sha256:98d8dc1e8133f86d916125deca9780d791b22645f0d62bafe1452d1cd5eac631", + "sha256:9fe17744d60fc404ac61f824118e1e15ce3c2e92eced9b8e22f3c7847acafbf2", + "sha256:a0c5e271058d148d730219ca4f33c5d841c6bd46e05b0da60fea7b516906ccd3", + "sha256:ab79cc13307065a0b3ef087f09f0509996fc605d35d6642bb28e5d85b2648e1e", + "sha256:ab9ccf26cb3fa32747ba2a637a189d2d42386a2fc4afc10dbc7f85922dd23b0f", + "sha256:adb6b5d07d17c32f9d34c9dd4a693637a72323cfcb1f8a52d57033ab2dd21e99", + "sha256:b2bdb038b3f5c284e3919218c580dedc95f592c417a358361450b9519b22f7a8", + "sha256:c23a442973dba3646811c284fce3dddd7fe5c2bd674ac73a122198e8218d6115", 
+ "sha256:c30115cecaf25fdcb67cc71c669d08425207f62d7a2f6d5416057c1460529216", + "sha256:c33ea7c55a73be343f02361795caf52a187357ea07708fb1cae6661ee1d689c8", + "sha256:ccbeaf5b18b173b9d78e332e017b30ba8bedcf03cdce1d13490b82a3f421bc98", + "sha256:ccd6774aa7bebdf9ca608bb0839318757a71b8e0d2cf7b10c002bc8790bd343e", + "sha256:d886de2ea81f513ba2d6820451d33b767a97c37867ba688d42e164b2dbca1362", + "sha256:db64a20e78969fc66665d2e5fc96cb4f4dc80f2137d8fed4b5a650ad569bb60f", + "sha256:db818e33599f7b2e4c6507f2b2c24f45ff539a1b6e4e09163bb6f3cfb4616ca7", + "sha256:dce1c56beef74d9c799a6ed94001693232a1402138292353a8ce302b64f457d9", + "sha256:de38b0b5b86e57efb129d179854e78b65cb8e294a8c75560877869c43aa2415a", + "sha256:de479e30abd2dfd49fdad3bd6953f2d930a45380be5143c0c9f7a1215cffc8cc", + "sha256:df09c80f4bc2bc2efde309af383c3fe8fd8c51fe0519edb350b9c9e0af43ffa4", + "sha256:df6b254e55c8ac2362afaa651e3e53453aa19a095570792346245773b434176e", + "sha256:e58c5d07b1f78dd4cb180c5b3b82465cd281aaeee8aafea0e5d72a4b97922cb1", + "sha256:e5cc288111c450c0a54a74475591b206d3b1cb47dc71bb6200f6be8b1337184c", + "sha256:eee724176b5bc50ee64905f559345448119b860a30b9489bd7a073f61baf925f", + "sha256:f16d1940c0cbc342f1d29d6212a006d172be616d2942c5c41966e8a3ce4c3be1", + "sha256:f25906e4a72d9833e81717c39a39dee7297ff5cb44957d06d177a2ab8ef2ef7f", + "sha256:f3294ce265011547630a59c20085fcb6af8cc5fa1fa44a203251f7d86cd5d913", + "sha256:f337486742c700b102d640830aab3faf2848bed966b479a39e6783edd4ab1c6c", + "sha256:f51c9d173e5fa4b12d06ddca09a41cabbdeb660471dbe55432423eec095709ab", + "sha256:f68025d6ba1816428b7de615c80f61cb03d5b7061158d4ced7696657a64aa59c", + "sha256:f74f6ffdc633aefecbc80282242a5395058db9d1247fa7dd2f070ef84dc82583", + "sha256:f864b412557e69a6b953d62c01a0ed0ee342666298aa7f2a29af526bfa80f6e9", + "sha256:f923e94e93a37fd990e8336e0b9bedea533e7cbed14e0c572bf9357ef2a70681", + "sha256:faa3dd7f4620ab5e5da7a0789d0aac78a9ad0376f102409d442ec5a4179e200a", + 
"sha256:fd2bb86f40962d53a91def15a2f7684c62e081a7b96ec74ed0259c34b15973b9" ], "markers": "python_version >= '3.9'", - "version": "==1.15.3" + "version": "==1.15.4" } } } From da6d33c189228415d7736a33b0a0c8ea62c3e0d9 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 10:17:16 -0400 Subject: [PATCH 03/34] added loguru logging config Signed-off-by: John DeAngelis --- backend/ops_api/ops/__init__.py | 40 +++++++++------------------------ 1 file changed, 11 insertions(+), 29 deletions(-) diff --git a/backend/ops_api/ops/__init__.py b/backend/ops_api/ops/__init__.py index f94a7eaf62..ac32b59c95 100644 --- a/backend/ops_api/ops/__init__.py +++ b/backend/ops_api/ops/__init__.py @@ -1,10 +1,11 @@ -import logging.config import os +import sys from authlib.integrations.flask_client import OAuth from flask import Blueprint, Flask, current_app, request from flask_cors import CORS from flask_jwt_extended import current_user, verify_jwt_in_request +from loguru import logger from sqlalchemy import event from sqlalchemy.orm import Session @@ -18,32 +19,13 @@ from ops_api.ops.utils.core import is_fake_user, is_unit_test -def configure_logging(log_level: str = "INFO") -> None: - logging.config.dictConfig( - { - "version": 1, - "formatters": { - "default": { - "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s", - } - }, - "handlers": { - "wsgi": { - "class": "logging.StreamHandler", - "stream": "ext://sys.stdout", - "formatter": "default", - } - }, - "root": {"level": f"{log_level}", "handlers": ["wsgi"]}, - } - ) - - def create_app() -> Flask: from ops_api.ops.utils.core import is_unit_test log_level = "INFO" if not is_unit_test() else "DEBUG" - configure_logging(log_level) # should be configured before any access to app.logger + logger.add(sys.stdout, format="{time} {level} {message}", level=log_level) + logger.add(sys.stderr, format="{time} {level} {message}", level=log_level) + app = Flask(__name__) 
app.config.from_object("ops_api.ops.environment.default_settings") @@ -115,7 +97,7 @@ def before_request(): @app.after_request def after_request(response): - log_response(app.logger, response) + log_response(response) return response register_error_handlers(app) @@ -123,7 +105,7 @@ def after_request(response): return app -def log_response(log, response): +def log_response(response): if request.url != request.url_root: response_data = { "method": request.method, @@ -133,10 +115,10 @@ def log_response(log, response): "json": response.get_data(as_text=True), "response_headers": response.headers, } - log.info(f"Response: {response_data}") + logger.info(f"Response: {response_data}") -def log_request(log: logging.Logger): +def log_request(): request_data = { "method": request.method, "url": request.url, @@ -144,11 +126,11 @@ def log_request(log: logging.Logger): "args": request.args, "headers": request.headers, } - log.info(f"Request: {request_data}") + logger.info(f"Request: {request_data}") def before_request_function(app: Flask, request: request): - log_request(app.logger) + log_request() # check that the UserSession is valid all_valid_endpoints = [ rule.endpoint From 50cdfa8bdd7823938e29cbcf5c4f929d0e52d125 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 10:49:00 -0400 Subject: [PATCH 04/34] set timezone and enhance log config Signed-off-by: John DeAngelis --- backend/ops_api/ops/__init__.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/backend/ops_api/ops/__init__.py b/backend/ops_api/ops/__init__.py index ac32b59c95..89cc7e2839 100644 --- a/backend/ops_api/ops/__init__.py +++ b/backend/ops_api/ops/__init__.py @@ -1,5 +1,6 @@ import os import sys +import time from authlib.integrations.flask_client import OAuth from flask import Blueprint, Flask, current_app, request @@ -18,13 +19,25 @@ from ops_api.ops.urls import register_api from ops_api.ops.utils.core import is_fake_user, is_unit_test +# Set the timezone 
to UTC +os.environ["TZ"] = "UTC" +time.tzset() + def create_app() -> Flask: from ops_api.ops.utils.core import is_unit_test log_level = "INFO" if not is_unit_test() else "DEBUG" - logger.add(sys.stdout, format="{time} {level} {message}", level=log_level) - logger.add(sys.stderr, format="{time} {level} {message}", level=log_level) + + # logger configuration + format = ( + "{time:YYYY-MM-DD HH:mm:ss} | " + "{level: <8} | " + "{name}:{function}:{line} | " + "{message}" + ) + logger.add(sys.stdout, format=format, level=log_level) + logger.add(sys.stderr, format=format, level=log_level) app = Flask(__name__) From 213dc136a33ba96aae225178e931d13550fdbddf Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 16:21:51 -0400 Subject: [PATCH 05/34] setup DB metadata in fixture and create DB session Signed-off-by: John DeAngelis --- backend/data_tools/tests/conftest.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/backend/data_tools/tests/conftest.py b/backend/data_tools/tests/conftest.py index 2e4c00024d..722986ae1f 100644 --- a/backend/data_tools/tests/conftest.py +++ b/backend/data_tools/tests/conftest.py @@ -1,6 +1,9 @@ import pytest from sqlalchemy import create_engine, text from sqlalchemy.exc import OperationalError +from sqlalchemy.orm import Session, scoped_session, sessionmaker + +from models import BaseModel def is_responsive(db): @@ -20,3 +23,21 @@ def db_service(docker_ip, docker_services): engine = create_engine(connection_string, echo=True, future=True) docker_services.wait_until_responsive(timeout=30.0, pause=0.1, check=lambda: is_responsive(engine)) return engine + + +@pytest.fixture() +def loaded_db(db_service) -> Session: + """Get SQLAlchemy Session.""" + + BaseModel.metadata.drop_all(db_service) + BaseModel.metadata.create_all(db_service) + + session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=db_service)) + + yield session + + # cleanup + session.rollback() + + session.commit() + 
session.close() From e2c1b25c18eeb7d0d369c97f0a5791b44f0a29d5 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 16:24:15 -0400 Subject: [PATCH 06/34] setup timezone and logger config Signed-off-by: John DeAngelis --- backend/data_tools/src/load_cans/main.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/backend/data_tools/src/load_cans/main.py b/backend/data_tools/src/load_cans/main.py index 23c9bf0d07..4954164560 100644 --- a/backend/data_tools/src/load_cans/main.py +++ b/backend/data_tools/src/load_cans/main.py @@ -1,4 +1,6 @@ +import os import sys +import time import click from data_tools.src.azure_utils.utils import get_csv @@ -6,6 +8,20 @@ from loguru import logger from sqlalchemy import text +# Set the timezone to UTC +os.environ["TZ"] = "UTC" +time.tzset() + +# logger configuration +format = ( + "{time:YYYY-MM-DD HH:mm:ss} | " + "{level: <8} | " + "{name}:{function}:{line} | " + "{message}" +) +logger.add(sys.stdout, format=format, level="DEBUG") +logger.add(sys.stderr, format=format, level="DEBUG") + @click.command() @click.option("--env", help="The environment to use.") @@ -19,8 +35,6 @@ def main( """ Main entrypoint for the script. 
""" - logger.add(sys.stdout, format="{time} {level} {message}", level="DEBUG") - logger.add(sys.stderr, format="{time} {level} {message}", level="DEBUG") logger.debug(f"Environment: {env}") logger.debug(f"Input CSV: {input_csv}") logger.debug(f"Output CSV: {output_csv}") @@ -42,9 +56,6 @@ def main( logger.info(f"Loaded CSV file from {input_csv}.") - for row in csv_f: - logger.debug(f"row={row}") - logger.info("Finished the ETL process.") if __name__ == "__main__": From e55b0335174f4c45d767e5a335861802053b2265 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 16:25:07 -0400 Subject: [PATCH 07/34] add initial tests Signed-off-by: John DeAngelis --- .../tests/ingest_cans/test_ingest_cans.py | 34 --- .../tests/load_cans/test_load_cans.py | 238 ++++++++++++++++++ 2 files changed, 238 insertions(+), 34 deletions(-) delete mode 100644 backend/data_tools/tests/ingest_cans/test_ingest_cans.py create mode 100644 backend/data_tools/tests/load_cans/test_load_cans.py diff --git a/backend/data_tools/tests/ingest_cans/test_ingest_cans.py b/backend/data_tools/tests/ingest_cans/test_ingest_cans.py deleted file mode 100644 index d845b9edaf..0000000000 --- a/backend/data_tools/tests/ingest_cans/test_ingest_cans.py +++ /dev/null @@ -1,34 +0,0 @@ -from unittest import mock - -import sqlalchemy.engine -from data_tools.environment.dev import DevConfig -from data_tools.environment.pytest import PytestConfig -from data_tools.src.import_static_data.import_data import get_config, import_data, init_db, load_new_data - - -def test_init_db(db_service): - engine, metadata_obj = init_db(PytestConfig(), db_service) - assert isinstance(engine, sqlalchemy.engine.Engine) - assert isinstance(metadata_obj, sqlalchemy.MetaData) - - -def test_get_config_default(): - assert isinstance(get_config(), DevConfig) - - - -def test_load_new_data_empty(): - mock_conn = mock.MagicMock() - load_new_data(mock_conn, {}) - assert mock_conn.execute.call_count == 0 - - -def test_import_data(mocker): - 
mock_engine = mocker.MagicMock() - mock_load = mocker.patch( - "data_tools.src.import_static_data.import_data.load_new_data" - ) - - import_data(mock_engine, {}) - - assert mock_load.called diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py new file mode 100644 index 0000000000..301c38f1d3 --- /dev/null +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ -0,0 +1,238 @@ +import csv + +import pytest +import sqlalchemy.engine +from data_tools.environment.dev import DevConfig +from data_tools.environment.pytest import PytestConfig +from data_tools.src.import_static_data.import_data import get_config, init_db +from data_tools.src.load_cans.utils import ( + CANData, + create_can_data, + create_models, + persist_models, + validate_all, + validate_data, +) +from sqlalchemy.sql import select + +from models import CAN, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Division, Portfolio + + +def test_init_db(db_service): + engine, metadata_obj = init_db(PytestConfig(), db_service) + assert isinstance(engine, sqlalchemy.engine.Engine) + assert isinstance(metadata_obj, sqlalchemy.MetaData) + + +def test_get_config_default(): + assert isinstance(get_config(), DevConfig) + + +def test_create_can_data(): + test_data = list(csv.DictReader(open("test_csv/can.tsv"), dialect="excel-tab")) + + assert len(test_data) == 17 + + assert create_can_data(test_data[0]).SYS_CAN_ID == 500 + assert create_can_data(test_data[0]).CAN_NBR == "G99HRF2" + assert create_can_data(test_data[0]).CAN_DESCRIPTION == "Healthy Marriages Responsible Fatherhood - OPRE" + assert create_can_data(test_data[0]).FUND == "AAXXXX20231DAD" + assert create_can_data(test_data[0]).ALLOWANCE == "0000000001" + assert create_can_data(test_data[0]).ALLOTMENT_ORG == "YZC6S1JUGUN" + assert create_can_data(test_data[0]).SUB_ALLOWANCE == "9KRZ2ND" + assert create_can_data(test_data[0]).CURRENT_FY_FUNDING_YTD == 880000.0 + assert 
create_can_data(test_data[0]).APPROP_PREFIX == "XX" + assert create_can_data(test_data[0]).APPROP_POSTFIX == "XXXX" + assert create_can_data(test_data[0]).APPROP_YEAR == "23" + assert create_can_data(test_data[0]).PORTFOLIO == "HMRF" + assert create_can_data(test_data[0]).FUNDING_SOURCE is None + assert create_can_data(test_data[0]).METHOD_OF_TRANSFER == "DIRECT" + assert create_can_data(test_data[0]).NICK_NAME == "HMRF-OPRE" + +def test_validate_data(): + test_data = list(csv.DictReader(open("test_csv/can.tsv"), dialect="excel-tab")) + assert len(test_data) == 17 + count = sum(1 for data in test_data if validate_data(create_can_data(data))) + assert count == 10 + +def test_validate_all(): + test_data = list(csv.DictReader(open("test_csv/can.tsv"), dialect="excel-tab")) + assert len(test_data) == 17 + can_data = [create_can_data(data) for data in test_data] + assert validate_all(can_data) == False + +def test_create_models_no_can_nbr(): + with pytest.raises(ValueError): + CANData( + SYS_CAN_ID=500, + CAN_NBR=None, + CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", + FUND="AAXXXX20231DAD", + ALLOWANCE="0000000001", + ALLOTMENT_ORG="YZC6S1JUGUN", + SUB_ALLOWANCE="9KRZ2ND", + CURRENT_FY_FUNDING_YTD=880000.0, + APPROP_PREFIX="XX", + APPROP_POSTFIX="XXXX", + APPROP_YEAR="23", + PORTFOLIO="HMRF", + FUNDING_SOURCE="OPRE", + METHOD_OF_TRANSFER="DIRECT", + NICK_NAME="HMRF-OPRE", + ) + + with pytest.raises(ValueError): + CANData( + SYS_CAN_ID=500, + CAN_NBR="", + CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", + FUND="AAXXXX20231DAD", + ALLOWANCE="0000000001", + ALLOTMENT_ORG="YZC6S1JUGUN", + SUB_ALLOWANCE="9KRZ2ND", + CURRENT_FY_FUNDING_YTD=880000.0, + APPROP_PREFIX="XX", + APPROP_POSTFIX="XXXX", + APPROP_YEAR="23", + PORTFOLIO="HMRF", + FUNDING_SOURCE="OPRE", + METHOD_OF_TRANSFER="DIRECT", + NICK_NAME="HMRF-OPRE", + ) + +def test_create_models(): + portfolios = [ + Portfolio( + abbreviation="HMRF", + name="Healthy Marriages Responsible 
Fatherhood", + ), + Portfolio( + abbreviation="CC", + name="Child Care", + ), + ] + + + data = CANData( + SYS_CAN_ID=500, + CAN_NBR="G99HRF2", + CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", + FUND="AAXXXX20231DAD", + ALLOWANCE="0000000001", + ALLOTMENT_ORG="YZC6S1JUGUN", + SUB_ALLOWANCE="9KRZ2ND", + CURRENT_FY_FUNDING_YTD=880000.0, + APPROP_PREFIX="XX", + APPROP_POSTFIX="XXXX", + APPROP_YEAR="23", + PORTFOLIO="HMRF", + FUNDING_SOURCE="OPRE", + METHOD_OF_TRANSFER="DIRECT", + NICK_NAME="HMRF-OPRE", + ) + + models = create_models(data, portfolios) + + assert len(models) == 2 + + can_model = next(m for m in models if isinstance(m, CAN)) + assert can_model.id == 500 + assert can_model.number == "G99HRF2" + assert can_model.description == "Healthy Marriages Responsible Fatherhood - OPRE" + assert can_model.nick_name == "HMRF-OPRE" + assert can_model.portfolio == next(p for p in portfolios if p.abbreviation == "HMRF") + assert can_model.funding_details == next(m for m in models if isinstance(m, CANFundingDetails)) + + funding_details_model = next(m for m in models if isinstance(m, CANFundingDetails)) + assert funding_details_model.fiscal_year == 2023 + assert funding_details_model.fund_code == "AAXXXX20231DAD" + assert funding_details_model.allowance == "0000000001" + assert funding_details_model.sub_allowance == "9KRZ2ND" + assert funding_details_model.allotment == "YZC6S1JUGUN" + assert funding_details_model.appropriation == "XX-23-XXXX" + assert funding_details_model.method_of_transfer == CANMethodOfTransfer.DIRECT + assert funding_details_model.funding_source == CANFundingSource.OPRE + assert funding_details_model.active_period == 1 + assert funding_details_model.obligate_by == 2024 + +def test_persist_models(loaded_db): + division = Division( + name="Child Care", + abbreviation="CC", + ) + loaded_db.add(division) + loaded_db.commit() + + portfolios = [ + Portfolio( + abbreviation="HMRF", + name="Healthy Marriages Responsible Fatherhood", + 
division_id=division.id, + ), + Portfolio( + abbreviation="CC", + name="Child Care", + division_id=division.id, + ), + ] + + loaded_db.add_all(portfolios) + loaded_db.commit() + + data_1 = CANData( + SYS_CAN_ID=500, + CAN_NBR="G99HRF2", + CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", + FUND="AAXXXX20231DAD", + ALLOWANCE="0000000001", + ALLOTMENT_ORG="YZC6S1JUGUN", + SUB_ALLOWANCE="9KRZ2ND", + CURRENT_FY_FUNDING_YTD=880000.0, + APPROP_PREFIX="XX", + APPROP_POSTFIX="XXXX", + APPROP_YEAR="23", + PORTFOLIO="HMRF", + FUNDING_SOURCE="OPRE", + METHOD_OF_TRANSFER="DIRECT", + NICK_NAME="HMRF-OPRE", + ) + + data_2 = CANData( + SYS_CAN_ID=501, + CAN_NBR="G99HRF3", + CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", + FUND="AAXXXX20231DAD", + ALLOWANCE="0000000001", + ALLOTMENT_ORG="YZC6S1JUGUN", + SUB_ALLOWANCE="9KRZ2ND", + CURRENT_FY_FUNDING_YTD=880000.0, + APPROP_PREFIX="XX", + APPROP_POSTFIX="XXXX", + APPROP_YEAR="23", + PORTFOLIO="CC", + FUNDING_SOURCE="OPRE", + METHOD_OF_TRANSFER="DIRECT", + NICK_NAME="HMRF-OPRE", + ) + + models = create_models(data_1, portfolios) + create_models(data_2, portfolios) + + persist_models(models, loaded_db) + + can_1 = loaded_db.get(CAN, 500) + assert can_1.number == "G99HRF2" + assert can_1.description == "Healthy Marriages Responsible Fatherhood - OPRE" + assert can_1.nick_name == "HMRF-OPRE" + assert can_1.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() + assert can_1.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() + + # Cleanup + for model in models: + loaded_db.delete(model) + loaded_db.commit() + + # TODO: Need to add cascade delete to models + # loaded_db.delete(division) + # for portfolio in portfolios: + # loaded_db.delete(portfolio) + # loaded_db.commit() From 9d1be5fec20929807da6d998300c3b994a22b26f Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 
2024 16:26:07 -0400 Subject: [PATCH 08/34] add initial can code Signed-off-by: John DeAngelis --- backend/data_tools/src/load_cans/utils.py | 158 ++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 backend/data_tools/src/load_cans/utils.py diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py new file mode 100644 index 0000000000..6b08bbdfab --- /dev/null +++ b/backend/data_tools/src/load_cans/utils.py @@ -0,0 +1,158 @@ +from dataclasses import dataclass, field +from typing import List + +from loguru import logger + +from models import CAN, BaseModel, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Portfolio + + +@dataclass +class CANData: + """ + Dataclass to represent a CAN data row. + """ + SYS_CAN_ID: int + CAN_NBR: str + CAN_DESCRIPTION: str + FUND: str + ALLOWANCE: str + ALLOTMENT_ORG: str + SUB_ALLOWANCE: str + CURRENT_FY_FUNDING_YTD: float + APPROP_PREFIX: str + APPROP_POSTFIX: str + APPROP_YEAR: str + PORTFOLIO: str + FUNDING_SOURCE: str + METHOD_OF_TRANSFER: str + NICK_NAME: str + + def __post_init__(self): + if not self.CAN_NBR: + raise ValueError("CAN_NBR is required.") + + self.SYS_CAN_ID = int(self.SYS_CAN_ID) if self.SYS_CAN_ID else None + self.CAN_NBR = str(self.CAN_NBR) + self.CAN_DESCRIPTION = str(self.CAN_DESCRIPTION) if self.CAN_DESCRIPTION else None + self.FUND = str(self.FUND) if self.FUND else None + self.ALLOWANCE = str(self.ALLOWANCE) if self.ALLOWANCE else None + self.ALLOTMENT_ORG = str(self.ALLOTMENT_ORG) if self.ALLOTMENT_ORG else None + self.SUB_ALLOWANCE = str(self.SUB_ALLOWANCE) if self.SUB_ALLOWANCE else None + self.CURRENT_FY_FUNDING_YTD = float(self.CURRENT_FY_FUNDING_YTD) if self.CURRENT_FY_FUNDING_YTD else None + self.APPROP_PREFIX = str(self.APPROP_PREFIX) if self.APPROP_PREFIX else None + self.APPROP_POSTFIX = str(self.APPROP_POSTFIX) if self.APPROP_POSTFIX else None + self.APPROP_YEAR = str(self.APPROP_YEAR) if self.APPROP_YEAR else None + 
self.PORTFOLIO = str(self.PORTFOLIO) if self.PORTFOLIO else None + self.FUNDING_SOURCE = str(self.FUNDING_SOURCE) if self.FUNDING_SOURCE else None + self.METHOD_OF_TRANSFER = str(self.METHOD_OF_TRANSFER) if self.METHOD_OF_TRANSFER else None + self.NICK_NAME = str(self.NICK_NAME) if self.NICK_NAME else None + + +def create_can_data(data: dict) -> CANData: + """ + Convert a dictionary to a CanData dataclass instance. + + :param data: The dictionary to convert. + + :return: A CanData dataclass instance. + """ + return CANData(**data) + +def validate_data(data: CANData) -> bool: + """ + Validate the data in a CanData instance. + + :param data: The CanData instance to validate. + + :return: True if the data is valid, False otherwise. + """ + return all([ + data.CAN_NBR is not None, + data.PORTFOLIO is not None, + data.FUNDING_SOURCE is not None, + data.METHOD_OF_TRANSFER is not None, + ]) + +def validate_all(data: List[CANData]) -> bool: + """ + Validate a list of CanData instances. + + :param data: The list of CanData instances to validate. + + :return: A list of valid CanData instances. + """ + return sum(1 for d in data if validate_data(d)) == len(data) + +def create_models(data: CANData, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: + """ + Convert a CanData instance to a list of BaseModel instances. + + :param data: The CanData instance to convert. + :param portfolio_ref_data: A list of Portfolio instances to use as reference data. + + :return: A list of BaseModel instances. 
+ """ + logger.debug(f"Creating models for {data}") + + models: List[BaseModel] = [] + try: + portfolio = next(p for p in portfolio_ref_data if p.abbreviation == data.PORTFOLIO) + if not portfolio: + raise ValueError(f"Portfolio not found for {data.PORTFOLIO}") + + funding_details = CANFundingDetails( + fiscal_year=int(data.FUND[6:10]), + fund_code=data.FUND, + allowance=data.ALLOWANCE, + sub_allowance=data.SUB_ALLOWANCE, + allotment=data.ALLOTMENT_ORG, + appropriation=data.APPROP_PREFIX + "-" + data.APPROP_YEAR[0:2] + "-" + data.APPROP_POSTFIX, + method_of_transfer=CANMethodOfTransfer[data.METHOD_OF_TRANSFER], + funding_source=CANFundingSource[data.FUNDING_SOURCE], + ) + + can = CAN( + id=data.SYS_CAN_ID, + number=data.CAN_NBR, + description=data.CAN_DESCRIPTION, + nick_name=data.NICK_NAME, + ) + + can.funding_details = funding_details + can.portfolio = portfolio + + models.append(can) + models.append(funding_details) + except Exception as e: + logger.error(f"Error creating models for {data}") + raise e + return models + +def create_all_models(data: List[CANData], portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: + """ + Convert a list of CanData instances to a list of BaseModel instances. + + :param data: The list of CanData instances to convert. + :param portfolio_ref_data: A list of Portfolio instances to use as reference data. + + :return: A list of BaseModel instances. + """ + return [m for d in data for m in create_models(d, portfolio_ref_data)] + +def persist_models(models: List[BaseModel], session) -> None: + """ + Persist a list of models to the database. + + :param models: The list of models to persist. + :param session: The database session to use. 
+ """ + for model in models: + obj = session.get(type(model), model.id) + + if obj: + session.merge(model) + else: + session.add(model) + session.commit() + logger.info(f"Persisted {len(models)} models.") + return None From fbb4d0202866a82421a23421dd1e5f4b65e4bbce Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Thu, 17 Oct 2024 16:26:26 -0400 Subject: [PATCH 09/34] add initial can code Signed-off-by: John DeAngelis --- backend/data_tools/tests/{ingest_cans => load_cans}/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename backend/data_tools/tests/{ingest_cans => load_cans}/__init__.py (100%) diff --git a/backend/data_tools/tests/ingest_cans/__init__.py b/backend/data_tools/tests/load_cans/__init__.py similarity index 100% rename from backend/data_tools/tests/ingest_cans/__init__.py rename to backend/data_tools/tests/load_cans/__init__.py From ea31d78ed4a055de51d667a88ce1c472b3a35810 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 15:54:42 -0400 Subject: [PATCH 10/34] update lock file Signed-off-by: John DeAngelis --- backend/ops_api/Pipfile.lock | 338 ++++++++++++++--------------------- 1 file changed, 134 insertions(+), 204 deletions(-) diff --git a/backend/ops_api/Pipfile.lock b/backend/ops_api/Pipfile.lock index 06c3d2a34f..0a1e71c0f0 100644 --- a/backend/ops_api/Pipfile.lock +++ b/backend/ops_api/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "1afb6ea63e00f7597489adc09dbf6c43e4a8fd7d609253516f083f1875ba0142" + "sha256": "793f7920360f5776da2c1fb5c80ad7b1f3bd60e77cd6dea3eddbf64804d486cc" }, "pipfile-spec": 6, "requires": { @@ -355,85 +355,6 @@ "markers": "python_version >= '3.7' and python_version < '4'", "version": "==4.6.0" }, - "greenlet": { - "hashes": [ - "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", - "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7", - "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", - 
"sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", - "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", - "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563", - "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83", - "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", - "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", - "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa", - "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", - "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", - "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", - "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22", - "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9", - "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0", - "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba", - "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3", - "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", - "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", - "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291", - "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", - "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", - "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", - "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", - "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef", - "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c", - "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", - "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c", 
- "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", - "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", - "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8", - "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d", - "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", - "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145", - "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80", - "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", - "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e", - "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", - "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1", - "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef", - "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc", - "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", - "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120", - "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437", - "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd", - "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981", - "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", - "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", - "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798", - "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7", - "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", - "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", - "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", - 
"sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af", - "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", - "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", - "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42", - "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e", - "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81", - "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", - "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617", - "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc", - "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de", - "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111", - "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", - "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", - "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6", - "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", - "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", - "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803", - "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", - "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f" - ], - "markers": "python_version < '3.13' and (platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))))", - "version": "==3.1.1" - }, "gunicorn": { "hashes": [ "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9", @@ -475,6 +396,15 @@ "markers": "python_version >= '3.7'", "version": "==3.1.4" }, + "loguru": { + "hashes": [ + 
"sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb", + "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac" + ], + "index": "pypi", + "markers": "python_version >= '3.5'", + "version": "==0.7.2" + }, "mako": { "hashes": [ "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a", @@ -485,71 +415,71 @@ }, "markupsafe": { "hashes": [ - "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", - "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", - "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", - "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", - "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", - "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", - "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", - "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", - "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", - "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", - "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", - "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", - "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", - "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", - "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", - "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", - "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", - "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", - "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", - "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", - 
"sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", - "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", - "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", - "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", - "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", - "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", - "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", - "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", - "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", - "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", - "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", - "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", - "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", - "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", - "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", - "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", - "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", - "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", - "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", - "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", - "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", - "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", - "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", - "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", - "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", - "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", 
- "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", - "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", - "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", - "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", - "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", - "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", - "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", - "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", - "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", - "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", - "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", - "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", - "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", - "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", - "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50" + "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396", + "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38", + "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a", + "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8", + "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b", + "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad", + "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a", + "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a", + "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da", + "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6", + 
"sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8", + "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344", + "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a", + "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8", + "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5", + "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7", + "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170", + "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132", + "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9", + "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd", + "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9", + "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346", + "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc", + "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589", + "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5", + "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915", + "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295", + "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453", + "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea", + "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b", + "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d", + "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b", + "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4", + "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b", + "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7", + "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf", 
+ "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f", + "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91", + "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd", + "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50", + "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b", + "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583", + "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a", + "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984", + "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c", + "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c", + "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25", + "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa", + "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4", + "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3", + "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97", + "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1", + "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd", + "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772", + "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a", + "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729", + "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca", + "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6", + "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635", + "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b", + "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": 
"==3.0.2" + "version": "==3.0.1" }, "marshmallow": { "hashes": [ @@ -1420,71 +1350,71 @@ }, "markupsafe": { "hashes": [ - "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", - "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", - "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", - "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", - "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", - "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", - "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", - "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", - "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", - "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", - "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", - "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", - "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", - "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", - "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", - "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", - "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", - "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", - "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", - "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", - "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", - "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", - "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", - "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", - 
"sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", - "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", - "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", - "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", - "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", - "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", - "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", - "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", - "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", - "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", - "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", - "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", - "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", - "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", - "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", - "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", - "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", - "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", - "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", - "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", - "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", - "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", - "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", - "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", - "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", - "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", 
- "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", - "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", - "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", - "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", - "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", - "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", - "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", - "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", - "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", - "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", - "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50" + "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396", + "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38", + "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a", + "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8", + "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b", + "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad", + "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a", + "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a", + "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da", + "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6", + "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8", + "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344", + "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a", + "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8", + 
"sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5", + "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7", + "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170", + "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132", + "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9", + "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd", + "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9", + "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346", + "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc", + "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589", + "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5", + "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915", + "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295", + "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453", + "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea", + "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b", + "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d", + "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b", + "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4", + "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b", + "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7", + "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf", + "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f", + "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91", + "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd", + "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50", 
+ "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b", + "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583", + "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a", + "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984", + "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c", + "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c", + "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25", + "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa", + "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4", + "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3", + "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97", + "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1", + "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd", + "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772", + "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a", + "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729", + "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca", + "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6", + "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635", + "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b", + "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==3.0.2" + "version": "==3.0.1" }, "matplotlib-inline": { "hashes": [ From af22b3f0fbbabcd8821707616ceac75de725a4a1 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 15:55:25 -0400 Subject: [PATCH 11/34] move to separate test package Signed-off-by: John DeAngelis --- 
backend/data_tools/tests/common/__init__.py | 0 backend/data_tools/tests/common/test_utils.py | 10 ++++++++++ 2 files changed, 10 insertions(+) create mode 100644 backend/data_tools/tests/common/__init__.py create mode 100644 backend/data_tools/tests/common/test_utils.py diff --git a/backend/data_tools/tests/common/__init__.py b/backend/data_tools/tests/common/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/data_tools/tests/common/test_utils.py b/backend/data_tools/tests/common/test_utils.py new file mode 100644 index 0000000000..947f5b7900 --- /dev/null +++ b/backend/data_tools/tests/common/test_utils.py @@ -0,0 +1,10 @@ +import sqlalchemy +from data_tools.environment.pytest import PytestConfig +from data_tools.src.common.utils import init_db + + +def test_init_db(db_service): + _, engine = db_service + engine, metadata_obj = init_db(PytestConfig(), engine) + assert isinstance(engine, sqlalchemy.engine.Engine) + assert isinstance(metadata_obj, sqlalchemy.MetaData) From 788de85d6f4f84074014d87afd2eb2c62aec0d29 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 16:00:22 -0400 Subject: [PATCH 12/34] copy dict values into list; wrap marshmallow schema safe user in a try block Signed-off-by: John DeAngelis --- backend/models/base.py | 35 ++++++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/backend/models/base.py b/backend/models/base.py index f04201e243..ccf6568052 100644 --- a/backend/models/base.py +++ b/backend/models/base.py @@ -6,11 +6,12 @@ import marshmallow import sqlalchemy +from loguru import logger from marshmallow import fields from marshmallow.exceptions import MarshmallowError from marshmallow_enum import EnumField -from sqlalchemy import Column, DateTime, ForeignKey, Integer, Sequence, func -from sqlalchemy.orm import Mapped, declarative_base, mapped_column, object_session, registry +from sqlalchemy import Column, DateTime, ForeignKey, Integer, Sequence, 
event, func +from sqlalchemy.orm import Mapped, declarative_base, mapped_column, mapper, object_session, registry from typing_extensions import Any Base = declarative_base() @@ -21,7 +22,8 @@ def setup_schema(base: Base) -> callable: def setup_schema_fn(): - for class_ in base.registry._class_registry.values(): + classes = list(base.registry._class_registry.values()) + for class_ in classes: if hasattr(class_, "__tablename__"): if class_.__name__.endswith("Schema"): raise ModelConversionError( @@ -77,7 +79,6 @@ class Meta(object): # init sqlalchemy_continuum make_versioned(user_cls=None) - class BaseModel(Base): __versioned__ = {} __abstract__ = True @@ -133,13 +134,25 @@ def to_dict(self): data = schema.dump(self) data["display_name"] = self.display_name - user_schema = marshmallow.class_registry.get_class("SafeUserSchema")() - data["created_by_user"] = ( - user_schema.dump(self.created_by_user) if self.created_by_user else None - ) - data["updated_by_user"] = ( - user_schema.dump(self.updated_by_user) if self.updated_by_user else None - ) + # SafeUserSchema is not always available in the marshmallow class registry + # It is primarily used in the Flask API as a kluge for responses that are not + # using custom marshmallow schemas. 
+ try: + _safe_user_schema_class = marshmallow.class_registry.get_class("SafeUserSchema") + + user_schema = _safe_user_schema_class() + data["created_by_user"] = ( + user_schema.dump(self.created_by_user) + if self.created_by_user + else None + ) + data["updated_by_user"] = ( + user_schema.dump(self.updated_by_user) + if self.updated_by_user + else None + ) + except marshmallow.exceptions.RegistryError: + logger.error("SafeUserSchema not found in marshmallow class registry") return data From 7209c629a4e18f13a36d723b604cc491c9457f86 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 16:01:33 -0400 Subject: [PATCH 13/34] add etl user; add history triggers to pytest Signed-off-by: John DeAngelis --- backend/data_tools/tests/conftest.py | 48 ++++++++++++++++++++-------- 1 file changed, 35 insertions(+), 13 deletions(-) diff --git a/backend/data_tools/tests/conftest.py b/backend/data_tools/tests/conftest.py index 722986ae1f..a166236707 100644 --- a/backend/data_tools/tests/conftest.py +++ b/backend/data_tools/tests/conftest.py @@ -1,9 +1,11 @@ import pytest -from sqlalchemy import create_engine, text +from data_tools.src.common.db import init_db +from sqlalchemy import event, text from sqlalchemy.exc import OperationalError -from sqlalchemy.orm import Session, scoped_session, sessionmaker +from sqlalchemy.orm import Session -from models import BaseModel +from models import BaseModel, User +from models.utils import track_db_history_after, track_db_history_before, track_db_history_catch_errors def is_responsive(db): @@ -20,24 +22,44 @@ def db_service(docker_ip, docker_services): """Ensure that DB is up and responsive.""" connection_string = "postgresql://postgres:local_password@localhost:54321/postgres" # pragma: allowlist secret - engine = create_engine(connection_string, echo=True, future=True) + db_session, engine = init_db(connection_string) docker_services.wait_until_responsive(timeout=30.0, pause=0.1, check=lambda: is_responsive(engine)) - return 
engine + BaseModel.metadata.create_all(engine) + return db_session, engine @pytest.fixture() -def loaded_db(db_service) -> Session: +def etl_user(db_service): + db_session, engine = db_service + etl_user = User(email="etl@example.com") + db_session.add(etl_user) + db_session.commit() + + yield etl_user + + +@pytest.fixture() +def loaded_db(db_service, etl_user) -> Session: """Get SQLAlchemy Session.""" - BaseModel.metadata.drop_all(db_service) - BaseModel.metadata.create_all(db_service) + db_session, engine = db_service + + @event.listens_for(db_session, "before_commit") + def receive_before_commit(session: Session): + track_db_history_before(session, etl_user) + + @event.listens_for(db_session, "after_flush") + def receive_after_flush(session: Session, flush_context): + track_db_history_after(session, etl_user) - session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=db_service)) + @event.listens_for(engine, "handle_error") + def receive_error(exception_context): + track_db_history_catch_errors(exception_context) - yield session + yield db_session # cleanup - session.rollback() + db_session.rollback() - session.commit() - session.close() + db_session.commit() + db_session.close() From 008d6a702873720977692676c0a065818bc9b91b Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 16:02:27 -0400 Subject: [PATCH 14/34] use api db module (with modifications) to data tools Signed-off-by: John DeAngelis --- backend/data_tools/src/common/db.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 backend/data_tools/src/common/db.py diff --git a/backend/data_tools/src/common/db.py b/backend/data_tools/src/common/db.py new file mode 100644 index 0000000000..64b842a35e --- /dev/null +++ b/backend/data_tools/src/common/db.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from sqlalchemy import Engine, create_engine +from sqlalchemy.orm import Session, scoped_session, sessionmaker + +from models import * # 
noqa: F403, F401 + + +def init_db( + conn_string: str, +) -> tuple[scoped_session[Session | Any], Engine]: # noqa: F405 + engine = create_engine(conn_string) + db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) + + # add the marshmallow schemas to all the models + setup_schema(BaseModel)() + + return db_session, engine From 38565d322e23ebafa4a4da381bc3a36b69da5508 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 16:03:20 -0400 Subject: [PATCH 15/34] move test to separate module Signed-off-by: John DeAngelis --- .../data_tools/tests/import_static_data/test_import_data.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/backend/data_tools/tests/import_static_data/test_import_data.py b/backend/data_tools/tests/import_static_data/test_import_data.py index 38a83a27ed..73425ddb5f 100644 --- a/backend/data_tools/tests/import_static_data/test_import_data.py +++ b/backend/data_tools/tests/import_static_data/test_import_data.py @@ -6,12 +6,6 @@ from data_tools.src.import_static_data.import_data import get_config, import_data, init_db, load_new_data -def test_init_db(db_service): - engine, metadata_obj = init_db(PytestConfig(), db_service) - assert isinstance(engine, sqlalchemy.engine.Engine) - assert isinstance(metadata_obj, sqlalchemy.MetaData) - - def test_get_config_default(): assert isinstance(get_config(), DevConfig) From 767f3b54e8671436907158c60d3451906db7cd10 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 16:04:28 -0400 Subject: [PATCH 16/34] add tests Signed-off-by: John DeAngelis --- .../tests/load_cans/test_load_cans.py | 36 ++++++++++--------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py index 301c38f1d3..0df622d756 100644 --- a/backend/data_tools/tests/load_cans/test_load_cans.py +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ -1,10 +1,8 
@@ import csv import pytest -import sqlalchemy.engine from data_tools.environment.dev import DevConfig -from data_tools.environment.pytest import PytestConfig -from data_tools.src.import_static_data.import_data import get_config, init_db +from data_tools.src.import_static_data.import_data import get_config from data_tools.src.load_cans.utils import ( CANData, create_can_data, @@ -13,15 +11,9 @@ validate_all, validate_data, ) -from sqlalchemy.sql import select +from sqlalchemy.orm import configure_mappers -from models import CAN, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Division, Portfolio - - -def test_init_db(db_service): - engine, metadata_obj = init_db(PytestConfig(), db_service) - assert isinstance(engine, sqlalchemy.engine.Engine) - assert isinstance(metadata_obj, sqlalchemy.MetaData) +from models import * # noqa: F403, F401 def test_get_config_default(): @@ -226,13 +218,23 @@ def test_persist_models(loaded_db): assert can_1.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() assert can_1.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() + # make sure the version records were created + assert can_1.versions[0].number == "G99HRF2" + assert can_1.versions[0].description == "Healthy Marriages Responsible Fatherhood - OPRE" + assert can_1.versions[0].nick_name == "HMRF-OPRE" + assert can_1.versions[0].portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar().versions[0] + assert can_1.versions[0].funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar().versions[0] + + # make sure the history records are created + history_records = loaded_db.execute(select(OpsDBHistory).filter(OpsDBHistory.class_name == "CAN").order_by(OpsDBHistory.created_on.desc())).scalars().all() + assert len(history_records) == 2 + assert 
history_records[0].event_type == OpsDBHistoryType.NEW + assert history_records[0].row_key == "500" + assert history_records[1].event_type == OpsDBHistoryType.NEW + assert history_records[1].row_key == "501" + + # Cleanup for model in models: loaded_db.delete(model) loaded_db.commit() - - # TODO: Need to add cascade delete to models - # loaded_db.delete(division) - # for portfolio in portfolios: - # loaded_db.delete(portfolio) - # loaded_db.commit() From 2ac21534a273576cf84b91b001805cca1a7148ce Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 16:05:15 -0400 Subject: [PATCH 17/34] use init_db from db module Signed-off-by: John DeAngelis --- backend/data_tools/src/common/utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/backend/data_tools/src/common/utils.py b/backend/data_tools/src/common/utils.py index 0420b9dade..58c89c6d93 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -1,12 +1,13 @@ from typing import Optional +import data_tools.src.common.db as db_module import sqlalchemy from data_tools.environment.azure import AzureConfig from data_tools.environment.dev import DevConfig from data_tools.environment.local import LocalConfig from data_tools.environment.pytest import PytestConfig from data_tools.environment.types import DataToolsConfig -from sqlalchemy import Engine, create_engine +from sqlalchemy import Engine from models import BaseModel @@ -15,9 +16,7 @@ def init_db( config: DataToolsConfig, db: Optional[Engine] = None ) -> tuple[sqlalchemy.engine.Engine, sqlalchemy.MetaData]: if not db: - engine = create_engine( - config.db_connection_string, echo=config.verbosity, future=True - ) + _, engine = db_module.init_db(config.db_connection_string) else: engine = db return engine, BaseModel.metadata From fe9354743b60595ec14ad72e8e0d3032f5f9bf9b Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Mon, 21 Oct 2024 17:18:46 -0400 Subject: [PATCH 18/34] add created_by 
to script Signed-off-by: John DeAngelis --- backend/data_tools/src/load_cans/utils.py | 6 ++++-- backend/data_tools/tests/load_cans/test_load_cans.py | 12 ++++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py index 6b08bbdfab..b40dfc13d7 100644 --- a/backend/data_tools/src/load_cans/utils.py +++ b/backend/data_tools/src/load_cans/utils.py @@ -3,7 +3,7 @@ from loguru import logger -from models import CAN, BaseModel, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Portfolio +from models import CAN, BaseModel, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Portfolio, User @dataclass @@ -83,7 +83,7 @@ def validate_all(data: List[CANData]) -> bool: """ return sum(1 for d in data if validate_data(d)) == len(data) -def create_models(data: CANData, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: +def create_models(data: CANData, etl_user: User, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: """ Convert a CanData instance to a list of BaseModel instances. 
@@ -109,6 +109,7 @@ def create_models(data: CANData, portfolio_ref_data: List[Portfolio]) -> List[Ba appropriation=data.APPROP_PREFIX + "-" + data.APPROP_YEAR[0:2] + "-" + data.APPROP_POSTFIX, method_of_transfer=CANMethodOfTransfer[data.METHOD_OF_TRANSFER], funding_source=CANFundingSource[data.FUNDING_SOURCE], + created_by=etl_user.id, ) can = CAN( @@ -116,6 +117,7 @@ def create_models(data: CANData, portfolio_ref_data: List[Portfolio]) -> List[Ba number=data.CAN_NBR, description=data.CAN_DESCRIPTION, nick_name=data.NICK_NAME, + created_by=etl_user.id, ) can.funding_details = funding_details diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py index 0df622d756..f8eb0dc084 100644 --- a/backend/data_tools/tests/load_cans/test_load_cans.py +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ -92,7 +92,7 @@ def test_create_models_no_can_nbr(): NICK_NAME="HMRF-OPRE", ) -def test_create_models(): +def test_create_models(etl_user): portfolios = [ Portfolio( abbreviation="HMRF", @@ -123,7 +123,7 @@ def test_create_models(): NICK_NAME="HMRF-OPRE", ) - models = create_models(data, portfolios) + models = create_models(data, etl_user, portfolios) assert len(models) == 2 @@ -147,7 +147,7 @@ def test_create_models(): assert funding_details_model.active_period == 1 assert funding_details_model.obligate_by == 2024 -def test_persist_models(loaded_db): +def test_persist_models(loaded_db, etl_user): division = Division( name="Child Care", abbreviation="CC", @@ -207,7 +207,7 @@ def test_persist_models(loaded_db): NICK_NAME="HMRF-OPRE", ) - models = create_models(data_1, portfolios) + create_models(data_2, portfolios) + models = create_models(data_1, etl_user, portfolios) + create_models(data_2, etl_user, portfolios) persist_models(models, loaded_db) @@ -217,6 +217,7 @@ def test_persist_models(loaded_db): assert can_1.nick_name == "HMRF-OPRE" assert can_1.portfolio == 
loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() assert can_1.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() + assert can_1.created_by == etl_user.id # make sure the version records were created assert can_1.versions[0].number == "G99HRF2" @@ -224,6 +225,7 @@ def test_persist_models(loaded_db): assert can_1.versions[0].nick_name == "HMRF-OPRE" assert can_1.versions[0].portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar().versions[0] assert can_1.versions[0].funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar().versions[0] + assert can_1.versions[0].created_by == etl_user.id # make sure the history records are created history_records = loaded_db.execute(select(OpsDBHistory).filter(OpsDBHistory.class_name == "CAN").order_by(OpsDBHistory.created_on.desc())).scalars().all() @@ -232,6 +234,8 @@ def test_persist_models(loaded_db): assert history_records[0].row_key == "500" assert history_records[1].event_type == OpsDBHistoryType.NEW assert history_records[1].row_key == "501" + assert history_records[0].created_by == etl_user.id + assert history_records[1].created_by == etl_user.id # Cleanup From b89d3900f742172ca155c291c8cd634ad4c02bb9 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Tue, 22 Oct 2024 09:46:43 -0400 Subject: [PATCH 19/34] complete the main script; add sys_user fixture Signed-off-by: John DeAngelis --- backend/data_tools/src/common/utils.py | 25 +++++++++++- backend/data_tools/src/load_cans/main.py | 39 ++++++++++++++++++- backend/data_tools/src/load_cans/utils.py | 20 +++++++--- backend/data_tools/tests/conftest.py | 14 +++---- .../tests/load_cans/test_load_cans.py | 16 ++++---- 5 files changed, 90 insertions(+), 24 deletions(-) diff --git a/backend/data_tools/src/common/utils.py 
b/backend/data_tools/src/common/utils.py index 58c89c6d93..404ea0e0d6 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -7,9 +7,10 @@ from data_tools.environment.local import LocalConfig from data_tools.environment.pytest import PytestConfig from data_tools.environment.types import DataToolsConfig -from sqlalchemy import Engine +from sqlalchemy.orm import Session +from sqlalchemy import Engine, select -from models import BaseModel +from models import BaseModel, User def init_db( @@ -15,9 +16,7 @@ def init_db( config: DataToolsConfig, db: Optional[Engine] = None ) -> tuple[sqlalchemy.engine.Engine, sqlalchemy.MetaData]: if not db: - engine = create_engine( - config.db_connection_string, echo=config.verbosity, future=True - ) + _, engine = db_module.init_db(config.db_connection_string) else: engine = db return engine, BaseModel.metadata @@ -35,3 +36,23 @@ def get_config(environment_name: Optional[str] = None) -> DataToolsConfig: case _: config = DevConfig() return config + +def get_or_create_sys_user(db: Engine) -> User: + """ + Get or create the system user. + + Args: + db: The database engine. + + Returns: + User: the system user (created if it did not already exist). + """ + with Session(db) as session: + user = session.execute(select(User).where(User.email == "sys-user@example.com")).scalar_one_or_none() + + if not user: + user = User(email="sys-user@example.com") + session.add(user) + session.commit() + + return user diff --git a/backend/data_tools/src/load_cans/main.py b/backend/data_tools/src/load_cans/main.py index 4954164560..cf228deffc 100644 --- a/backend/data_tools/src/load_cans/main.py +++ b/backend/data_tools/src/load_cans/main.py @@ -4,9 +4,20 @@ import click from data_tools.src.azure_utils.utils import get_csv -from data_tools.src.common.utils import get_config, init_db +from data_tools.src.common.utils import get_config, get_or_create_sys_user, init_db +from data_tools.src.load_cans.utils import ( + create_all_can_data, + create_all_models, + create_can_data, + create_models, + persist_models, + validate_all, +) from loguru import logger -from sqlalchemy import text +from sqlalchemy import select, text +from sqlalchemy.orm import Session + +from models import Portfolio, User # Set the timezone to UTC os.environ["TZ"] = "UTC" @@ -56,6 +67,30 @@ def main( logger.info(f"Loaded CSV file from {input_csv}.") + sys_user =
get_or_create_sys_user(db_engine) + logger.info(f"Retrieved system user {sys_user}") + + with Session(db_engine) as session: + portfolios = list(session.execute(select(Portfolio)).scalars().all()) + + logger.info(f"Retrieved {len(portfolios)} portfolios.") + + can_data = create_all_can_data(list(csv_f)) + + logger.info(f"Created {len(can_data)} CAN data instances.") + + if not validate_all(can_data): + logger.error("Validation failed. Exiting.") + sys.exit(1) + + logger.info("Data validation passed.") + + models = create_all_models(can_data, sys_user, portfolios) + + logger.info(f"Created {len(models)} models.") + + persist_models(models, session) + logger.info("Finished the ETL process.") if __name__ == "__main__": diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py index b40dfc13d7..0606002c16 100644 --- a/backend/data_tools/src/load_cans/utils.py +++ b/backend/data_tools/src/load_cans/utils.py @@ -83,7 +83,7 @@ def validate_all(data: List[CANData]) -> bool: """ return sum(1 for d in data if validate_data(d)) == len(data) -def create_models(data: CANData, etl_user: User, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: +def create_models(data: CANData, sys_user: User, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: """ Convert a CanData instance to a list of BaseModel instances. 
@@ -109,7 +109,7 @@ def create_models(data: CANData, etl_user: User, portfolio_ref_data: List[Portfo appropriation=data.APPROP_PREFIX + "-" + data.APPROP_YEAR[0:2] + "-" + data.APPROP_POSTFIX, method_of_transfer=CANMethodOfTransfer[data.METHOD_OF_TRANSFER], funding_source=CANFundingSource[data.FUNDING_SOURCE], - created_by=etl_user.id, + created_by=sys_user.id, ) can = CAN( @@ -117,7 +117,7 @@ def create_models(data: CANData, etl_user: User, portfolio_ref_data: List[Portfo number=data.CAN_NBR, description=data.CAN_DESCRIPTION, nick_name=data.NICK_NAME, - created_by=etl_user.id, + created_by=sys_user.id, ) can.funding_details = funding_details @@ -130,7 +130,7 @@ def create_models(data: CANData, etl_user: User, portfolio_ref_data: List[Portfo raise e return models -def create_all_models(data: List[CANData], portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: +def create_all_models(data: List[CANData], sys_user: User, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: """ Convert a list of CanData instances to a list of BaseModel instances. @@ -139,7 +139,7 @@ def create_all_models(data: List[CANData], portfolio_ref_data: List[Portfolio]) :return: A list of BaseModel instances. """ - return [m for d in data for m in create_models(d, portfolio_ref_data)] + return [m for d in data for m in create_models(d, sys_user, portfolio_ref_data)] def persist_models(models: List[BaseModel], session) -> None: """ @@ -158,3 +158,13 @@ def persist_models(models: List[BaseModel], session) -> None: session.commit() logger.info(f"Persisted {len(models)} models.") return None + +def create_all_can_data(data: List[dict]) -> List[CANData]: + """ + Convert a list of dictionaries to a list of CanData instances. + + :param data: The list of dictionaries to convert. + + :return: A list of CanData instances. 
+ """ + return [create_can_data(d) for d in data] diff --git a/backend/data_tools/tests/conftest.py b/backend/data_tools/tests/conftest.py index a166236707..1b4749c630 100644 --- a/backend/data_tools/tests/conftest.py +++ b/backend/data_tools/tests/conftest.py @@ -29,28 +29,28 @@ def db_service(docker_ip, docker_services): return db_session, engine @pytest.fixture() -def etl_user(db_service): +def sys_user(db_service): db_session, engine = db_service - etl_user = User(email="etl@example.com") - db_session.add(etl_user) + user = User(email="sys@example.com") + db_session.add(user) db_session.commit() - yield etl_user + yield user @pytest.fixture() -def loaded_db(db_service, etl_user) -> Session: +def loaded_db(db_service, sys_user) -> Session: """Get SQLAlchemy Session.""" db_session, engine = db_service @event.listens_for(db_session, "before_commit") def receive_before_commit(session: Session): - track_db_history_before(session, etl_user) + track_db_history_before(session, sys_user) @event.listens_for(db_session, "after_flush") def receive_after_flush(session: Session, flush_context): - track_db_history_after(session, etl_user) + track_db_history_after(session, sys_user) @event.listens_for(engine, "handle_error") def receive_error(exception_context): diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py index f8eb0dc084..85587468d4 100644 --- a/backend/data_tools/tests/load_cans/test_load_cans.py +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ -92,7 +92,7 @@ def test_create_models_no_can_nbr(): NICK_NAME="HMRF-OPRE", ) -def test_create_models(etl_user): +def test_create_models(sys_user): portfolios = [ Portfolio( abbreviation="HMRF", @@ -123,7 +123,7 @@ def test_create_models(etl_user): NICK_NAME="HMRF-OPRE", ) - models = create_models(data, etl_user, portfolios) + models = create_models(data, sys_user, portfolios) assert len(models) == 2 @@ -147,7 +147,7 @@ def 
test_create_models(etl_user): assert funding_details_model.active_period == 1 assert funding_details_model.obligate_by == 2024 -def test_persist_models(loaded_db, etl_user): +def test_persist_models(loaded_db, sys_user): division = Division( name="Child Care", abbreviation="CC", @@ -207,7 +207,7 @@ def test_persist_models(loaded_db, etl_user): NICK_NAME="HMRF-OPRE", ) - models = create_models(data_1, etl_user, portfolios) + create_models(data_2, etl_user, portfolios) + models = create_models(data_1, sys_user, portfolios) + create_models(data_2, sys_user, portfolios) persist_models(models, loaded_db) @@ -217,7 +217,7 @@ def test_persist_models(loaded_db, etl_user): assert can_1.nick_name == "HMRF-OPRE" assert can_1.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() assert can_1.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() - assert can_1.created_by == etl_user.id + assert can_1.created_by == sys_user.id # make sure the version records were created assert can_1.versions[0].number == "G99HRF2" @@ -225,7 +225,7 @@ def test_persist_models(loaded_db, etl_user): assert can_1.versions[0].nick_name == "HMRF-OPRE" assert can_1.versions[0].portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar().versions[0] assert can_1.versions[0].funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar().versions[0] - assert can_1.versions[0].created_by == etl_user.id + assert can_1.versions[0].created_by == sys_user.id # make sure the history records are created history_records = loaded_db.execute(select(OpsDBHistory).filter(OpsDBHistory.class_name == "CAN").order_by(OpsDBHistory.created_on.desc())).scalars().all() @@ -234,8 +234,8 @@ def test_persist_models(loaded_db, etl_user): assert history_records[0].row_key == "500" assert 
history_records[1].event_type == OpsDBHistoryType.NEW assert history_records[1].row_key == "501" - assert history_records[0].created_by == etl_user.id - assert history_records[1].created_by == etl_user.id + assert history_records[0].created_by == sys_user.id + assert history_records[1].created_by == sys_user.id # Cleanup From 62ef4a1ce8d39c6fba36fa3b9ee6cde624af0b6c Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Tue, 22 Oct 2024 13:08:31 -0400 Subject: [PATCH 20/34] merge with work from disable_users.py Signed-off-by: John DeAngelis --- backend/data_tools/src/common/utils.py | 2 + .../src/disable_users/disable_users.py | 56 ++++++------------- .../data_tools/src/disable_users/queries.py | 3 +- backend/data_tools/src/load_cans/main.py | 15 ++--- backend/data_tools/tests/conftest.py | 13 +++-- .../tests/disable_users/test_disable_users.py | 46 +++------------ 6 files changed, 41 insertions(+), 94 deletions(-) diff --git a/backend/data_tools/src/common/utils.py b/backend/data_tools/src/common/utils.py index 404ea0e0d6..43a3c1031e 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -12,6 +12,8 @@ from models import BaseModel, User +SYSTEM_ADMIN_OIDC_ID = "00000000-0000-1111-a111-000000000026" +SYSTEM_ADMIN_EMAIL = "system.admin@email.com" def init_db( config: DataToolsConfig, db: Optional[Engine] = None diff --git a/backend/data_tools/src/disable_users/disable_users.py b/backend/data_tools/src/disable_users/disable_users.py index b6fe16bda3..e9d0c9fb1b 100644 --- a/backend/data_tools/src/disable_users/disable_users.py +++ b/backend/data_tools/src/disable_users/disable_users.py @@ -1,13 +1,13 @@ -import logging import os +import sys +import time +from data_tools.src.common.utils import get_or_create_sys_user from data_tools.src.disable_users.queries import ( ALL_ACTIVE_USER_SESSIONS_QUERY, EXCLUDED_USER_OIDC_IDS, GET_USER_ID_BY_OIDC_QUERY, INACTIVE_USER_QUERY, - SYSTEM_ADMIN_EMAIL, - SYSTEM_ADMIN_OIDC_ID, ) from 
data_tools.src.import_static_data.import_data import get_config, init_db from sqlalchemy import text @@ -15,9 +15,19 @@ from models import * # noqa: F403, F401 -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) +# Set the timezone to UTC +os.environ["TZ"] = "UTC" +time.tzset() +# logger configuration +format = ( + "{time:YYYY-MM-DD HH:mm:ss} | " + "{level: <8} | " + "{name}:{function}:{line} | " + "{message}" +) +logger.add(sys.stdout, format=format, level="INFO") +logger.add(sys.stderr, format=format, level="INFO") def get_ids_from_oidc_ids(se, oidc_ids: list): """Retrieve user IDs corresponding to a list of OIDC IDs.""" @@ -33,42 +43,11 @@ def get_ids_from_oidc_ids(se, oidc_ids: list): return ids - -def create_system_admin(se): - """Create system user if it doesn't exist.""" - system_admin = se.execute( - text(GET_USER_ID_BY_OIDC_QUERY), - {"oidc_id": SYSTEM_ADMIN_OIDC_ID} - ).fetchone() - - if system_admin is None: - sys_user = User( - email=SYSTEM_ADMIN_EMAIL, - oidc_id=SYSTEM_ADMIN_OIDC_ID, - status=UserStatus.LOCKED - ) - se.add(sys_user) - se.commit() - return sys_user.id - - return system_admin[0] - - def disable_user(se, user_id, system_admin_id): """Deactivate a single user and log the change.""" updated_user = User(id=user_id, status=UserStatus.INACTIVE, updated_by=system_admin_id) se.merge(updated_user) - db_audit = build_audit(updated_user, OpsDBHistoryType.UPDATED) - ops_db_history = OpsDBHistory( - event_type=OpsDBHistoryType.UPDATED, - created_by=system_admin_id, - class_name=updated_user.__class__.__name__, - row_key=db_audit.row_key, - changes=db_audit.changes, - ) - se.add(ops_db_history) - ops_event = OpsEvent( event_type=OpsEventType.UPDATE_USER, event_status=OpsEventStatus.SUCCESS, @@ -88,9 +67,10 @@ def disable_user(se, user_id, system_admin_id): def update_disabled_users_status(conn: sqlalchemy.engine.Engine): """Update the status of disabled users in the database.""" + logger.info("Checking for System User.") + 
system_admin = get_or_create_sys_user(conn.engine) with Session(conn) as se: - logger.info("Checking for System User.") - system_admin_id = create_system_admin(se) + system_admin_id = system_admin.id logger.info("Fetching inactive users.") results = se.execute(text(INACTIVE_USER_QUERY)).scalars().all() diff --git a/backend/data_tools/src/disable_users/queries.py b/backend/data_tools/src/disable_users/queries.py index a43397f922..c3f280a3b2 100644 --- a/backend/data_tools/src/disable_users/queries.py +++ b/backend/data_tools/src/disable_users/queries.py @@ -1,5 +1,4 @@ -SYSTEM_ADMIN_OIDC_ID = "00000000-0000-1111-a111-000000000026" -SYSTEM_ADMIN_EMAIL = "system.admin@email.com" +from data_tools.src.common.utils import SYSTEM_ADMIN_OIDC_ID EXCLUDED_USER_OIDC_IDS = [ "00000000-0000-1111-a111-000000000018", # Admin Demo diff --git a/backend/data_tools/src/load_cans/main.py b/backend/data_tools/src/load_cans/main.py index cf228deffc..165e665859 100644 --- a/backend/data_tools/src/load_cans/main.py +++ b/backend/data_tools/src/load_cans/main.py @@ -5,19 +5,12 @@ import click from data_tools.src.azure_utils.utils import get_csv from data_tools.src.common.utils import get_config, get_or_create_sys_user, init_db -from data_tools.src.load_cans.utils import ( - create_all_can_data, - create_all_models, - create_can_data, - create_models, - persist_models, - validate_all, -) +from data_tools.src.load_cans.utils import create_all_can_data, create_all_models, persist_models, validate_all from loguru import logger from sqlalchemy import select, text from sqlalchemy.orm import Session -from models import Portfolio, User +from models import Portfolio # Set the timezone to UTC os.environ["TZ"] = "UTC" @@ -30,8 +23,8 @@ "{name}:{function}:{line} | " "{message}" ) -logger.add(sys.stdout, format=format, level="DEBUG") -logger.add(sys.stderr, format=format, level="DEBUG") +logger.add(sys.stdout, format=format, level="INFO") +logger.add(sys.stderr, format=format, level="INFO") 
@click.command() diff --git a/backend/data_tools/tests/conftest.py b/backend/data_tools/tests/conftest.py index 1b4749c630..50574c6e8a 100644 --- a/backend/data_tools/tests/conftest.py +++ b/backend/data_tools/tests/conftest.py @@ -1,6 +1,7 @@ import pytest from data_tools.src.common.db import init_db -from sqlalchemy import event, text +from data_tools.src.common.utils import SYSTEM_ADMIN_EMAIL, SYSTEM_ADMIN_OIDC_ID +from sqlalchemy import event, select, text from sqlalchemy.exc import OperationalError from sqlalchemy.orm import Session @@ -31,9 +32,13 @@ def db_service(docker_ip, docker_services): @pytest.fixture() def sys_user(db_service): db_session, engine = db_service - user = User(email="sys@example.com") - db_session.add(user) - db_session.commit() + + user = db_session.execute(select(User).where(User.oidc_id == SYSTEM_ADMIN_OIDC_ID)).scalar_one_or_none() + + if not user: + user = User(oidc_id=SYSTEM_ADMIN_OIDC_ID, email=SYSTEM_ADMIN_EMAIL) + db_session.add(user) + db_session.commit() yield user diff --git a/backend/data_tools/tests/disable_users/test_disable_users.py b/backend/data_tools/tests/disable_users/test_disable_users.py index 8e171baadd..073331d239 100644 --- a/backend/data_tools/tests/disable_users/test_disable_users.py +++ b/backend/data_tools/tests/disable_users/test_disable_users.py @@ -1,15 +1,9 @@ from unittest.mock import MagicMock, patch import pytest -from data_tools.src.disable_users.disable_users import ( - create_system_admin, - disable_user, - get_ids_from_oidc_ids, - update_disabled_users_status, -) -from data_tools.src.disable_users.queries import SYSTEM_ADMIN_EMAIL, SYSTEM_ADMIN_OIDC_ID +from data_tools.src.disable_users.disable_users import disable_user, get_ids_from_oidc_ids, update_disabled_users_status -from models import OpsDBHistoryType, OpsEventStatus, OpsEventType, UserStatus +from models import OpsEventStatus, OpsEventType, User, UserStatus system_admin_id = 111 @@ -20,27 +14,6 @@ def mock_session(): 
session.execute.return_value.fetchone.return_value = None return session -def test_create_system_admin(mock_session): - create_system_admin(mock_session) - - se_add = mock_session.add.call_args[0][0] - mock_session.execute.assert_called_once() - mock_session.add.assert_called_once() - mock_session.commit.assert_called_once() - assert se_add.email == SYSTEM_ADMIN_EMAIL - assert se_add.oidc_id == SYSTEM_ADMIN_OIDC_ID - assert se_add.first_name is None - assert se_add.last_name is None - -def test_return_existing_system_admin(mock_session): - mock_session.execute.return_value.fetchone.return_value = (system_admin_id,) - - result = create_system_admin(mock_session) - - assert result == system_admin_id - mock_session.add.assert_not_called() - mock_session.commit.assert_not_called() - def test_deactivate_user(mock_session): user_id = 1 db_history_changes = { @@ -54,7 +27,7 @@ def test_deactivate_user(mock_session): disable_user(mock_session, user_id, system_admin_id) assert mock_session.merge.call_count == 3 - assert mock_session.add.call_count == 2 + assert mock_session.add.call_count == 1 user_call = mock_session.merge.call_args_list[0] assert user_call[0][0].id == user_id @@ -66,20 +39,15 @@ def test_deactivate_user(mock_session): assert user_session_call_1[0][0].is_active is False assert user_session_call_1[0][0].updated_by == system_admin_id - ops_db_history_call = mock_session.add.call_args_list[0] - assert ops_db_history_call[0][0].event_type == OpsDBHistoryType.UPDATED - assert ops_db_history_call[0][0].created_by == system_admin_id - assert ops_db_history_call[0][0].class_name == 'User' - assert ops_db_history_call[0][0].row_key == str(user_id) - assert ops_db_history_call[0][0].changes == db_history_changes - - ops_events_call = mock_session.add.call_args_list[1] + ops_events_call = mock_session.add.call_args_list[0] assert ops_events_call[0][0].event_type == OpsEventType.UPDATE_USER assert ops_events_call[0][0].event_status == OpsEventStatus.SUCCESS assert 
ops_events_call[0][0].created_by == system_admin_id @patch("data_tools.src.disable_users.disable_users.logger") -def test_no_inactive_users(mock_logger, mock_session): +def test_no_inactive_users(mock_logger, mock_session, mocker): + mocker.patch("data_tools.src.disable_users.disable_users.get_or_create_sys_user", return_value=User(id=system_admin_id)) + mock_session.execute.return_value.all.return_value = None update_disabled_users_status(mock_session) From 384a6033f1856ce9e31aaf6720b35a97becb4c88 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 09:49:32 -0400 Subject: [PATCH 21/34] add integration test for main entrypoint Signed-off-by: John DeAngelis --- backend/data_tools/environment/pytest.py | 2 +- backend/data_tools/src/common/db.py | 20 ++- backend/data_tools/src/common/utils.py | 21 ++- backend/data_tools/src/load_cans/main.py | 43 +++--- backend/data_tools/src/load_cans/utils.py | 36 ++++- .../test_csv/{can.tsv => can_invalid.tsv} | 0 backend/data_tools/test_csv/can_valid.tsv | 14 ++ .../tests/azure_utils/test_utils.py | 2 +- backend/data_tools/tests/conftest.py | 16 +-- .../tests/load_cans/test_load_cans.py | 133 ++++++++++++++---- 10 files changed, 209 insertions(+), 78 deletions(-) rename backend/data_tools/test_csv/{can.tsv => can_invalid.tsv} (100%) create mode 100644 backend/data_tools/test_csv/can_valid.tsv diff --git a/backend/data_tools/environment/pytest.py b/backend/data_tools/environment/pytest.py index c309dca67d..5069d07a1b 100644 --- a/backend/data_tools/environment/pytest.py +++ b/backend/data_tools/environment/pytest.py @@ -5,7 +5,7 @@ class PytestConfig(DataToolsConfig): @property def db_connection_string(self) -> str: return ( - "postgresql://ops:ops@unittest_db:5432/postgres" # pragma: allowlist secret + "postgresql://postgres:local_password@localhost:54321/postgres" # pragma: allowlist secret ) @property diff --git a/backend/data_tools/src/common/db.py b/backend/data_tools/src/common/db.py index 
64b842a35e..1749a25b85 100644 --- a/backend/data_tools/src/common/db.py +++ b/backend/data_tools/src/common/db.py @@ -1,18 +1,36 @@ from __future__ import annotations +from data_tools.src.common.utils import get_or_create_sys_user from sqlalchemy import Engine, create_engine from sqlalchemy.orm import Session, scoped_session, sessionmaker from models import * # noqa: F403, F401 +from models.utils import track_db_history_after, track_db_history_before, track_db_history_catch_errors def init_db( conn_string: str, ) -> tuple[scoped_session[Session | Any], Engine]: # noqa: F405 engine = create_engine(conn_string) + db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) - # add the marshmallow schemas to all the models setup_schema(BaseModel)() return db_session, engine + +def setup_triggers(session: scoped_session[Session | Any], sys_user: User) -> None: + + @event.listens_for(session, "before_commit") + def receive_before_commit(session: Session): + track_db_history_before(session, sys_user) + + @event.listens_for(session, "after_flush") + def receive_after_flush(session: Session, flush_context): + track_db_history_after(session, sys_user) + + @event.listens_for(session.get_bind(), "handle_error") + def receive_error(exception_context): + track_db_history_catch_errors(exception_context) + + return None diff --git a/backend/data_tools/src/common/utils.py b/backend/data_tools/src/common/utils.py index 43a3c1031e..3ef3550dc8 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -1,4 +1,5 @@ from typing import Optional +from uuid import UUID import data_tools.src.common.db as db_module import sqlalchemy @@ -7,8 +8,8 @@ from data_tools.environment.local import LocalConfig from data_tools.environment.pytest import PytestConfig from data_tools.environment.types import DataToolsConfig -from nox import Session from sqlalchemy import Engine, select +from sqlalchemy.orm import Session from models 
import BaseModel, User @@ -39,22 +40,20 @@ def get_config(environment_name: Optional[str] = None) -> DataToolsConfig: config = DevConfig() return config -def get_or_create_sys_user(db: Engine) -> User: +def get_or_create_sys_user(session: Session) -> User: """ Get or create the system user. Args: - db: The database engine. - + session: SQLAlchemy session object Returns: None """ - with Session(db) as session: - user = session.execute(select(User).where(User.email == "sys-user@example.com")).scalar_one() + user = session.execute(select(User).where(User.oidc_id == SYSTEM_ADMIN_OIDC_ID)).scalar_one_or_none() - if not user: - user = User(email="sys-user@example.com") - session.add(user) - session.commit() + if not user: + user = User(email=SYSTEM_ADMIN_EMAIL, oidc_id=UUID(SYSTEM_ADMIN_OIDC_ID)) + session.add(user) + session.commit() - return user + return user diff --git a/backend/data_tools/src/load_cans/main.py b/backend/data_tools/src/load_cans/main.py index 165e665859..8eb55e7071 100644 --- a/backend/data_tools/src/load_cans/main.py +++ b/backend/data_tools/src/load_cans/main.py @@ -4,11 +4,18 @@ import click from data_tools.src.azure_utils.utils import get_csv +from data_tools.src.common.db import setup_triggers from data_tools.src.common.utils import get_config, get_or_create_sys_user, init_db -from data_tools.src.load_cans.utils import create_all_can_data, create_all_models, persist_models, validate_all +from data_tools.src.load_cans.utils import ( + create_all_can_data, + create_all_models, + persist_models, + transform, + validate_all, +) from loguru import logger from sqlalchemy import select, text -from sqlalchemy.orm import Session +from sqlalchemy.orm import scoped_session, sessionmaker from models import Portfolio @@ -30,18 +37,15 @@ @click.command() @click.option("--env", help="The environment to use.") @click.option("--input-csv", help="The path to the CSV input file.") -@click.option("--output-csv", help="The path to the CSV output file.") def main( 
env: str, input_csv: str, - output_csv: str, ): """ Main entrypoint for the script. """ logger.debug(f"Environment: {env}") logger.debug(f"Input CSV: {input_csv}") - logger.debug(f"Output CSV: {output_csv}") logger.info("Starting the ETL process.") @@ -60,30 +64,23 @@ def main( logger.info(f"Loaded CSV file from {input_csv}.") - sys_user = get_or_create_sys_user(db_engine) - logger.info(f"Retrieved system user {sys_user}") + Session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=db_engine)) - with Session(db_engine) as session: - portfolios = list(session.execute(select(Portfolio)).scalars().all()) + with Session() as session: + sys_user = get_or_create_sys_user(session) + logger.info(f"Retrieved system user {sys_user}") - logger.info(f"Retrieved {len(portfolios)} portfolios.") + setup_triggers(session, sys_user) - can_data = create_all_can_data(list(csv_f)) - - logger.info(f"Created {len(can_data)} CAN data instances.") + portfolios = list(session.execute(select(Portfolio)).scalars().all()) + logger.info(f"Retrieved {len(portfolios)} portfolios.") - if not validate_all(can_data): - logger.error("Validation failed. 
Exiting.") + try: + transform(csv_f, portfolios, session, sys_user) + except RuntimeError as re: + logger.error(f"Error transforming data: {re}") sys.exit(1) - logger.info("Data validation passed.") - - models = create_all_models(can_data, sys_user, portfolios) - - logger.info(f"Created {len(models)} models.") - - persist_models(models, session) - logger.info("Finished the ETL process.") if __name__ == "__main__": diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py index 0606002c16..6fa0114b29 100644 --- a/backend/data_tools/src/load_cans/utils.py +++ b/backend/data_tools/src/load_cans/utils.py @@ -1,7 +1,9 @@ +from csv import DictReader from dataclasses import dataclass, field from typing import List from loguru import logger +from sqlalchemy.orm import Session from models import CAN, BaseModel, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Portfolio, User @@ -88,6 +90,7 @@ def create_models(data: CANData, sys_user: User, portfolio_ref_data: List[Portfo Convert a CanData instance to a list of BaseModel instances. :param data: The CanData instance to convert. + :param sys_user: The system user to use. :param portfolio_ref_data: A list of Portfolio instances to use as reference data. :return: A list of BaseModel instances. @@ -106,7 +109,7 @@ def create_models(data: CANData, sys_user: User, portfolio_ref_data: List[Portfo allowance=data.ALLOWANCE, sub_allowance=data.SUB_ALLOWANCE, allotment=data.ALLOTMENT_ORG, - appropriation=data.APPROP_PREFIX + "-" + data.APPROP_YEAR[0:2] + "-" + data.APPROP_POSTFIX, + appropriation="-".join([data.APPROP_PREFIX or "", data.APPROP_YEAR[0:2] or "", data.APPROP_POSTFIX or ""]), method_of_transfer=CANMethodOfTransfer[data.METHOD_OF_TRANSFER], funding_source=CANFundingSource[data.FUNDING_SOURCE], created_by=sys_user.id, @@ -168,3 +171,34 @@ def create_all_can_data(data: List[dict]) -> List[CANData]: :return: A list of CanData instances. 
""" return [create_can_data(d) for d in data] + + +def transform(data: DictReader, portfolios: List[Portfolio], session: Session, sys_user: User) -> None: + """ + Transform the data from the CSV file and persist the models to the database. + + :param data: The data from the CSV file. + :param portfolios: The portfolios to use as reference data. + :param session: The database session to use. + :param sys_user: The system user to use. + + :return: None + """ + if not data or not portfolios or not session or not sys_user: + logger.error("No data to process. Exiting.") + raise RuntimeError("No data to process.") + + can_data = create_all_can_data(list(data)) + logger.info(f"Created {len(can_data)} CAN data instances.") + + if not validate_all(can_data): + logger.error("Validation failed. Exiting.") + raise RuntimeError("Validation failed.") + + logger.info("Data validation passed.") + + models = create_all_models(can_data, sys_user, portfolios) + logger.info(f"Created {len(models)} models.") + + persist_models(models, session) + logger.info("Persisted models.") diff --git a/backend/data_tools/test_csv/can.tsv b/backend/data_tools/test_csv/can_invalid.tsv similarity index 100% rename from backend/data_tools/test_csv/can.tsv rename to backend/data_tools/test_csv/can_invalid.tsv diff --git a/backend/data_tools/test_csv/can_valid.tsv b/backend/data_tools/test_csv/can_valid.tsv new file mode 100644 index 0000000000..64ce535014 --- /dev/null +++ b/backend/data_tools/test_csv/can_valid.tsv @@ -0,0 +1,14 @@ +SYS_CAN_ID CAN_NBR CAN_DESCRIPTION FUND ALLOWANCE ALLOTMENT_ORG SUB_ALLOWANCE CURRENT_FY_FUNDING_YTD APPROP_PREFIX APPROP_POSTFIX APPROP_YEAR PORTFOLIO FUNDING_SOURCE METHOD_OF_TRANSFER NICK_NAME +500 G99HRF2 Healthy Marriages Responsible Fatherhood - OPRE AAXXXX20231DAD 0000000001 YZC6S1JUGUN 9KRZ2ND 880000.0 XX XXXX 23 HMRF OPRE DIRECT HMRF-OPRE +505 G994648 Kinship Navigation FFXXXX20215DAD 0000000006 KCTQYEKJ4F6 G4N2ZIV 880000.0 XX XXXX 2125 HMRF OPRE IAA Kin-Nav +506 
G996125 Healthy Marriages Responsible Fatherhood - OFA GGXXXX20231DAD 0000000007 U0Z8853GL8U PVJMU38 6000000.0 XX XXXX 23 HMRF OPRE COST_SHARE HMRF-OFA +507 G99XXX1 Healthy Marriages Responsible Fatherhood - OFA HHXXXX20231DAD 0000000008 GRSTZYBTRKH NIN2LL8 880000.0 XX XXXX 23 HMRF HHS COST_SHARE HMRF-OFA +508 G99XXX2 Healthy Marriages Responsible Fatherhood - OFA IIXXXX20231DAD 0000000009 VU1QDFMSGAG L2RMG9Q 880000.0 XX XXXX 23 HMRF ACF IAA HMRF-OFA +509 G99XXX3 Healthy Marriages Responsible Fatherhood - OFA JJXXXX20231DAD 0000000010 UBYZ7579P7D 5D60VLO "" XXXX 23 HMRF HHS IDDA HMRF-OFA +510 G99XXX4 Healthy Marriages Responsible Fatherhood - OFA KKXXXX20235DAD 0000000011 BGY4HR6QIYV TTEZ1IT 6000000.0 "" XXXX 2327 HMRF OPRE IDDA HMRF-OFA +511 G99XXX7 Healthy Marriages Responsible Fatherhood - OFA LLXXXX20225DAD 0000000012 YC2EI1SP73Y BYN6BFT 6000000.0 XX XXXX 2226 HMRF HHS COST_SHARE HMRF-OFA +512 G99XXX8 Example CAN MMXXXX20235DAD 0000000013 Q9WC0FO90M1 9IQGXAD 880000.0 XX XXXX 2327 CC OPRE DIRECT +513 G99MV23 MIHOPE Check-in 2023 NNXXXX20231DAD 0000000014 A8VRB6VA1J2 A1SN3C4 1000000.0 XX XXXX 23 CC ACF DIRECT MIHOPE 23 +514 G99MV24 MIHOPE Check-in 2024 OOXXXX20235DAD 0000000015 797583A9P4B I9692UV XX XXXX 2327 CC HHS IDDA MIHOPE 24 +515 G99MVT3 MOHOPE Long-Term PPXXXX20235DAD 0000000016 YIUZPLILXL9 NWHI19Z 1000000.0 XX XXXX 2327 CC HHS DIRECT MIHOPE LT +516 G99SHARED Shared CAN QQXXXX20235DAD 0000000017 NB8XXJF9OIU M6C11PL XX XXXX 2327 CC OPRE COST_SHARE SHARED diff --git a/backend/data_tools/tests/azure_utils/test_utils.py b/backend/data_tools/tests/azure_utils/test_utils.py index 552e0351b2..4509e9b4eb 100644 --- a/backend/data_tools/tests/azure_utils/test_utils.py +++ b/backend/data_tools/tests/azure_utils/test_utils.py @@ -32,7 +32,7 @@ def test_get_csv(mocker): assert data[2]["name"] == "DIV3" # Test with a local file - result = get_csv("test_csv/can.tsv") + result = get_csv("test_csv/can_invalid.tsv") assert result is not None data = list(result) assert 
len(data) == 17 diff --git a/backend/data_tools/tests/conftest.py b/backend/data_tools/tests/conftest.py index 50574c6e8a..237bc9f2db 100644 --- a/backend/data_tools/tests/conftest.py +++ b/backend/data_tools/tests/conftest.py @@ -1,6 +1,6 @@ import pytest from data_tools.src.common.db import init_db -from data_tools.src.common.utils import SYSTEM_ADMIN_EMAIL, SYSTEM_ADMIN_OIDC_ID +from data_tools.src.common.utils import SYSTEM_ADMIN_EMAIL, SYSTEM_ADMIN_OIDC_ID, get_or_create_sys_user from sqlalchemy import event, select, text from sqlalchemy.exc import OperationalError from sqlalchemy.orm import Session @@ -31,16 +31,9 @@ def db_service(docker_ip, docker_services): @pytest.fixture() def sys_user(db_service): - db_session, engine = db_service - - user = db_session.execute(select(User).where(User.oidc_id == SYSTEM_ADMIN_OIDC_ID)).scalar_one_or_none() + db_session, _ = db_service - if not user: - user = User(oidc_id=SYSTEM_ADMIN_OIDC_ID, email=SYSTEM_ADMIN_EMAIL) - db_session.add(user) - db_session.commit() - - yield user + yield get_or_create_sys_user(db_session) @pytest.fixture() @@ -66,5 +59,8 @@ def receive_error(exception_context): # cleanup db_session.rollback() + # cleanup history records + db_session.execute(text("DELETE FROM ops_db_history")) + db_session.commit() db_session.close() diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py index 85587468d4..579e3f3125 100644 --- a/backend/data_tools/tests/load_cans/test_load_cans.py +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ -1,8 +1,11 @@ import csv import pytest +from click.testing import CliRunner from data_tools.environment.dev import DevConfig +from data_tools.src.common.utils import get_or_create_sys_user from data_tools.src.import_static_data.import_data import get_config +from data_tools.src.load_cans.main import main from data_tools.src.load_cans.utils import ( CANData, create_can_data, @@ -11,17 +14,58 @@ validate_all, 
validate_data, ) -from sqlalchemy.orm import configure_mappers +from sqlalchemy import and_, text from models import * # noqa: F403, F401 +@pytest.fixture() +def test_division(loaded_db): + division = loaded_db.get(Division, 999) + + if not division: + division = Division( + id=999, + name="Fake Division", + abbreviation="FD", + ) + loaded_db.merge(division) + loaded_db.commit() + + yield division + +@pytest.fixture() +def test_portfolio(loaded_db, test_division): + portfolio_1 = loaded_db.get(Portfolio, 1) + portfolio_2 = loaded_db.get(Portfolio, 2) + + if not portfolio_1 or not portfolio_2: + portfolio_1 = Portfolio( + id=1, + abbreviation="HMRF", + name="Healthy Marriages Responsible Fatherhood", + division_id=test_division.id, + ) + + portfolio_2 = Portfolio( + id=2, + abbreviation="CC", + name="Child Care", + division_id=test_division.id, + ) + + loaded_db.add_all([portfolio_1, portfolio_2]) + loaded_db.commit() + + yield portfolio_1, portfolio_2 + + def test_get_config_default(): assert isinstance(get_config(), DevConfig) def test_create_can_data(): - test_data = list(csv.DictReader(open("test_csv/can.tsv"), dialect="excel-tab")) + test_data = list(csv.DictReader(open("test_csv/can_invalid.tsv"), dialect="excel-tab")) assert len(test_data) == 17 @@ -42,13 +86,13 @@ def test_create_can_data(): assert create_can_data(test_data[0]).NICK_NAME == "HMRF-OPRE" def test_validate_data(): - test_data = list(csv.DictReader(open("test_csv/can.tsv"), dialect="excel-tab")) + test_data = list(csv.DictReader(open("test_csv/can_invalid.tsv"), dialect="excel-tab")) assert len(test_data) == 17 count = sum(1 for data in test_data if validate_data(create_can_data(data))) assert count == 10 def test_validate_all(): - test_data = list(csv.DictReader(open("test_csv/can.tsv"), dialect="excel-tab")) + test_data = list(csv.DictReader(open("test_csv/can_invalid.tsv"), dialect="excel-tab")) assert len(test_data) == 17 can_data = [create_can_data(data) for data in test_data] assert 
validate_all(can_data) == False @@ -147,30 +191,7 @@ def test_create_models(sys_user): assert funding_details_model.active_period == 1 assert funding_details_model.obligate_by == 2024 -def test_persist_models(loaded_db, sys_user): - division = Division( - name="Child Care", - abbreviation="CC", - ) - loaded_db.add(division) - loaded_db.commit() - - portfolios = [ - Portfolio( - abbreviation="HMRF", - name="Healthy Marriages Responsible Fatherhood", - division_id=division.id, - ), - Portfolio( - abbreviation="CC", - name="Child Care", - division_id=division.id, - ), - ] - - loaded_db.add_all(portfolios) - loaded_db.commit() - +def test_persist_models(loaded_db, sys_user, test_division, test_portfolio): data_1 = CANData( SYS_CAN_ID=500, CAN_NBR="G99HRF2", @@ -207,7 +228,7 @@ def test_persist_models(loaded_db, sys_user): NICK_NAME="HMRF-OPRE", ) - models = create_models(data_1, sys_user, portfolios) + create_models(data_2, sys_user, portfolios) + models = create_models(data_1, sys_user, list(test_portfolio)) + create_models(data_2, sys_user, list(test_portfolio)) persist_models(models, loaded_db) @@ -237,8 +258,60 @@ def test_persist_models(loaded_db, sys_user): assert history_records[0].created_by == sys_user.id assert history_records[1].created_by == sys_user.id - # Cleanup for model in models: loaded_db.delete(model) loaded_db.commit() + + +def test_main(loaded_db, mocker, test_division, test_portfolio): + result = CliRunner().invoke( + main, + [ + "--env", + "pytest", + "--input-csv", + "test_csv/can_valid.tsv", + ], + ) + + assert result.exit_code == 0 + + # make sure the data was loaded + can_1 = loaded_db.get(CAN, 500) + assert can_1.number == "G99HRF2" + assert can_1.description == "Healthy Marriages Responsible Fatherhood - OPRE" + assert can_1.nick_name == "HMRF-OPRE" + assert can_1.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() + assert can_1.funding_details == 
loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() + assert can_1.funding_details.fiscal_year == 2023 + assert can_1.funding_details.fund_code == "AAXXXX20231DAD" + assert can_1.funding_details.allowance == "0000000001" + assert can_1.funding_details.sub_allowance == "9KRZ2ND" + assert can_1.funding_details.allotment == "YZC6S1JUGUN" + assert can_1.funding_details.appropriation == "XX-23-XXXX" + assert can_1.funding_details.method_of_transfer == CANMethodOfTransfer.DIRECT + assert can_1.funding_details.funding_source == CANFundingSource.OPRE + assert can_1.funding_details.created_by == get_or_create_sys_user(loaded_db).id + + can_2 = loaded_db.get(CAN, 505) + assert can_2.number == "G994648" + assert can_2.description == "Kinship Navigation" + assert can_2.nick_name == "Kin-Nav" + assert can_2.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() + assert can_2.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "FFXXXX20215DAD")).scalar() + assert can_2.funding_details.fiscal_year == 2021 + assert can_2.funding_details.fund_code == "FFXXXX20215DAD" + assert can_2.funding_details.allowance == "0000000006" + assert can_2.funding_details.sub_allowance == "G4N2ZIV" + assert can_2.funding_details.allotment == "KCTQYEKJ4F6" + assert can_2.funding_details.appropriation == "XX-21-XXXX" + assert can_2.funding_details.method_of_transfer == CANMethodOfTransfer.IAA + assert can_2.funding_details.funding_source == CANFundingSource.OPRE + assert can_2.funding_details.created_by == get_or_create_sys_user(loaded_db).id + + history_objs = loaded_db.execute(select(OpsDBHistory).filter(OpsDBHistory.class_name == "CAN")).scalars().all() + assert len(history_objs) == 13 + + can_1_history = loaded_db.execute(select(OpsDBHistory).where(and_(OpsDBHistory.row_key == "500", OpsDBHistory.class_name == "CAN"))).scalars().all() + assert 
len(can_1_history) == 1 From a49d081a5d20fd9f5a20deec6c1e6b7817f67aae Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 13:28:23 -0400 Subject: [PATCH 22/34] use session instead of engine to get sys_user Signed-off-by: John DeAngelis --- backend/data_tools/src/disable_users/disable_users.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/backend/data_tools/src/disable_users/disable_users.py b/backend/data_tools/src/disable_users/disable_users.py index e9d0c9fb1b..71a239fcd1 100644 --- a/backend/data_tools/src/disable_users/disable_users.py +++ b/backend/data_tools/src/disable_users/disable_users.py @@ -67,9 +67,11 @@ def disable_user(se, user_id, system_admin_id): def update_disabled_users_status(conn: sqlalchemy.engine.Engine): """Update the status of disabled users in the database.""" - logger.info("Checking for System User.") - system_admin = get_or_create_sys_user(conn.engine) + with Session(conn) as se: + logger.info("Checking for System User.") + system_admin = get_or_create_sys_user(se) + system_admin_id = system_admin.id logger.info("Fetching inactive users.") From 9143af19093bbc61da38e44748d3a037d097aa0b Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 13:29:06 -0400 Subject: [PATCH 23/34] renamed script Signed-off-by: John DeAngelis --- backend/data_tools/scripts/{get_csv.sh => load_cans.sh} | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) rename backend/data_tools/scripts/{get_csv.sh => load_cans.sh} (72%) diff --git a/backend/data_tools/scripts/get_csv.sh b/backend/data_tools/scripts/load_cans.sh similarity index 72% rename from backend/data_tools/scripts/get_csv.sh rename to backend/data_tools/scripts/load_cans.sh index d4b76607a7..4267afd099 100755 --- a/backend/data_tools/scripts/get_csv.sh +++ b/backend/data_tools/scripts/load_cans.sh @@ -5,17 +5,14 @@ export PYTHONPATH=.:$PYTHONPATH ENV=$1 INPUT_CSV=$2 -OUTPUT_CSV=$3 echo "Activating virtual environment..." . 
.venv/bin/activate echo "ENV is $ENV" echo "INPUT_CSV is $INPUT_CSV" -echo "OUTPUT_CSV is $OUTPUT_CSV" echo "Running script..." python data_tools/src/load_cans/main.py \ --env "${ENV}" \ ---input-csv "${INPUT_CSV}" \ ---output-csv "${OUTPUT_CSV}" +--input-csv "${INPUT_CSV}" From 35b05786a233890d117d32daf2be642d1f341a2c Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 15:55:35 -0400 Subject: [PATCH 24/34] remove sys_user pytest fixture Signed-off-by: John DeAngelis --- backend/data_tools/tests/conftest.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/backend/data_tools/tests/conftest.py b/backend/data_tools/tests/conftest.py index 237bc9f2db..bbe3838346 100644 --- a/backend/data_tools/tests/conftest.py +++ b/backend/data_tools/tests/conftest.py @@ -30,24 +30,19 @@ def db_service(docker_ip, docker_services): return db_session, engine @pytest.fixture() -def sys_user(db_service): - db_session, _ = db_service - - yield get_or_create_sys_user(db_session) - - -@pytest.fixture() -def loaded_db(db_service, sys_user) -> Session: +def loaded_db(db_service) -> Session: """Get SQLAlchemy Session.""" db_session, engine = db_service @event.listens_for(db_session, "before_commit") def receive_before_commit(session: Session): + sys_user = get_or_create_sys_user(session) track_db_history_before(session, sys_user) @event.listens_for(db_session, "after_flush") def receive_after_flush(session: Session, flush_context): + sys_user = get_or_create_sys_user(session) track_db_history_after(session, sys_user) @event.listens_for(engine, "handle_error") From cafbfaec9891136ff2d0c1c5a9db511d9ae874d3 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 15:56:56 -0400 Subject: [PATCH 25/34] move init_db_from_config Signed-off-by: John DeAngelis --- backend/data_tools/src/common/db.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/backend/data_tools/src/common/db.py 
b/backend/data_tools/src/common/db.py index 1749a25b85..7cf37119ce 100644 --- a/backend/data_tools/src/common/db.py +++ b/backend/data_tools/src/common/db.py @@ -1,10 +1,11 @@ from __future__ import annotations -from data_tools.src.common.utils import get_or_create_sys_user +from data_tools.environment.types import DataToolsConfig from sqlalchemy import Engine, create_engine from sqlalchemy.orm import Session, scoped_session, sessionmaker from models import * # noqa: F403, F401 +from models import BaseModel from models.utils import track_db_history_after, track_db_history_before, track_db_history_catch_errors @@ -34,3 +35,13 @@ def receive_error(exception_context): track_db_history_catch_errors(exception_context) return None + + +def init_db_from_config( + config: DataToolsConfig, db: Optional[Engine] = None +) -> tuple[sqlalchemy.engine.Engine, sqlalchemy.MetaData]: + if not db: + _, engine = init_db(config.db_connection_string) + else: + engine = db + return engine, BaseModel.metadata From 3aa11d2dc53218e37ac02481e6c9b33b58654030 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 15:57:26 -0400 Subject: [PATCH 26/34] move init_db_from_config Signed-off-by: John DeAngelis --- backend/data_tools/src/import_static_data/import_data.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/data_tools/src/import_static_data/import_data.py b/backend/data_tools/src/import_static_data/import_data.py index 02579f0295..1a6ca15ab2 100644 --- a/backend/data_tools/src/import_static_data/import_data.py +++ b/backend/data_tools/src/import_static_data/import_data.py @@ -2,7 +2,8 @@ import os import json5 -from data_tools.src.common.utils import get_config, init_db +from data_tools.src.common.db import init_db +from data_tools.src.common.utils import get_config from sqlalchemy import text from sqlalchemy.engine import Connection, Engine from sqlalchemy.orm import Session From 443224a8335784ecc23705ccdfc7973b5f3b76ff Mon Sep 17 00:00:00 2001 
From: John DeAngelis Date: Wed, 23 Oct 2024 15:58:01 -0400 Subject: [PATCH 27/34] move init_db_from_config Signed-off-by: John DeAngelis --- backend/data_tools/src/import_static_data/load_db.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/data_tools/src/import_static_data/load_db.py b/backend/data_tools/src/import_static_data/load_db.py index 0a52480b67..720e799874 100644 --- a/backend/data_tools/src/import_static_data/load_db.py +++ b/backend/data_tools/src/import_static_data/load_db.py @@ -1,7 +1,8 @@ import os import sqlalchemy.engine -from data_tools.src.common.utils import get_config, init_db +from data_tools.src.common.db import init_db_from_config +from data_tools.src.common.utils import get_config from sqlalchemy.orm import configure_mappers from models import BaseModel @@ -18,7 +19,7 @@ def delete_and_create(engine: sqlalchemy.engine.Engine) -> None: script_env = os.getenv("ENV") script_config = get_config(script_env) - db_engine, db_metadata_obj = init_db(script_config) + db_engine, db_metadata_obj = init_db_from_config(script_config) delete_and_create(db_engine) From da7f2fb7a0f97a8f2469afe3e1cfa3dc74be4d31 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 15:59:17 -0400 Subject: [PATCH 28/34] move init_db_from_config Signed-off-by: John DeAngelis --- backend/data_tools/src/load_cans/main.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/backend/data_tools/src/load_cans/main.py b/backend/data_tools/src/load_cans/main.py index 8eb55e7071..6c18f57846 100644 --- a/backend/data_tools/src/load_cans/main.py +++ b/backend/data_tools/src/load_cans/main.py @@ -4,15 +4,9 @@ import click from data_tools.src.azure_utils.utils import get_csv -from data_tools.src.common.db import setup_triggers -from data_tools.src.common.utils import get_config, get_or_create_sys_user, init_db -from data_tools.src.load_cans.utils import ( - create_all_can_data, - create_all_models, - 
persist_models, - transform, - validate_all, -) +from data_tools.src.common.db import init_db_from_config, setup_triggers +from data_tools.src.common.utils import get_config, get_or_create_sys_user +from data_tools.src.load_cans.utils import create_all_can_data, create_all_models, transform, validate_all from loguru import logger from sqlalchemy import select, text from sqlalchemy.orm import scoped_session, sessionmaker @@ -50,7 +44,7 @@ def main( logger.info("Starting the ETL process.") script_config = get_config(env) - db_engine, db_metadata_obj = init_db(script_config) + db_engine, db_metadata_obj = init_db_from_config(script_config) if db_engine is None: logger.error("Failed to initialize the database engine.") From b14b6078d153697bbfaff44f95d7193ea5004107 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 15:59:57 -0400 Subject: [PATCH 29/34] get upsert working properly Signed-off-by: John DeAngelis --- backend/data_tools/src/common/utils.py | 19 +- backend/data_tools/src/load_cans/utils.py | 145 +++++--- backend/data_tools/tests/common/test_utils.py | 4 +- .../tests/load_cans/test_load_cans.py | 319 ++++++++++-------- 4 files changed, 292 insertions(+), 195 deletions(-) diff --git a/backend/data_tools/src/common/utils.py b/backend/data_tools/src/common/utils.py index 3ef3550dc8..e4e9988deb 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -1,30 +1,19 @@ from typing import Optional from uuid import UUID -import data_tools.src.common.db as db_module -import sqlalchemy from data_tools.environment.azure import AzureConfig from data_tools.environment.dev import DevConfig from data_tools.environment.local import LocalConfig from data_tools.environment.pytest import PytestConfig from data_tools.environment.types import DataToolsConfig -from sqlalchemy import Engine, select +from sqlalchemy import select from sqlalchemy.orm import Session -from models import BaseModel, User +from models import User 
SYSTEM_ADMIN_OIDC_ID = "00000000-0000-1111-a111-000000000026" SYSTEM_ADMIN_EMAIL = "system.admin@email.com" -def init_db( - config: DataToolsConfig, db: Optional[Engine] = None -) -> tuple[sqlalchemy.engine.Engine, sqlalchemy.MetaData]: - if not db: - _, engine = db_module.init_db(config.db_connection_string) - else: - engine = db - return engine, BaseModel.metadata - def get_config(environment_name: Optional[str] = None) -> DataToolsConfig: match environment_name: @@ -53,7 +42,7 @@ def get_or_create_sys_user(session: Session) -> User: if not user: user = User(email=SYSTEM_ADMIN_EMAIL, oidc_id=UUID(SYSTEM_ADMIN_OIDC_ID)) - session.add(user) - session.commit() + # session.add(user) + # session.commit() return user diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py index 6fa0114b29..288dce46ac 100644 --- a/backend/data_tools/src/load_cans/utils.py +++ b/backend/data_tools/src/load_cans/utils.py @@ -3,6 +3,7 @@ from typing import List from loguru import logger +from sqlalchemy import and_, select from sqlalchemy.orm import Session from models import CAN, BaseModel, CANFundingDetails, CANFundingSource, CANMethodOfTransfer, Portfolio, User @@ -85,82 +86,92 @@ def validate_all(data: List[CANData]) -> bool: """ return sum(1 for d in data if validate_data(d)) == len(data) -def create_models(data: CANData, sys_user: User, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: +def create_models(data: CANData, sys_user: User, session: Session) -> None: """ - Convert a CanData instance to a list of BaseModel instances. + Convert a CanData instance to a BaseModel instance. :param data: The CanData instance to convert. :param sys_user: The system user to use. - :param portfolio_ref_data: A list of Portfolio instances to use as reference data. + :param session: The database session to use. :return: A list of BaseModel instances. 
""" logger.debug(f"Creating models for {data}") - models: List[BaseModel] = [] try: - portfolio = next(p for p in portfolio_ref_data if p.abbreviation == data.PORTFOLIO) + portfolio = session.execute(select(Portfolio).where(Portfolio.abbreviation == data.PORTFOLIO)).scalar_one_or_none() if not portfolio: raise ValueError(f"Portfolio not found for {data.PORTFOLIO}") - funding_details = CANFundingDetails( - fiscal_year=int(data.FUND[6:10]), - fund_code=data.FUND, - allowance=data.ALLOWANCE, - sub_allowance=data.SUB_ALLOWANCE, - allotment=data.ALLOTMENT_ORG, - appropriation="-".join([data.APPROP_PREFIX or "", data.APPROP_YEAR[0:2] or "", data.APPROP_POSTFIX or ""]), - method_of_transfer=CANMethodOfTransfer[data.METHOD_OF_TRANSFER], - funding_source=CANFundingSource[data.FUNDING_SOURCE], - created_by=sys_user.id, - ) - can = CAN( - id=data.SYS_CAN_ID, + id=data.SYS_CAN_ID if data.SYS_CAN_ID else None, number=data.CAN_NBR, description=data.CAN_DESCRIPTION, nick_name=data.NICK_NAME, created_by=sys_user.id, ) - can.funding_details = funding_details can.portfolio = portfolio - models.append(can) - models.append(funding_details) + # get or create funding details + fiscal_year = int(data.FUND[6:10]) + fund_code = data.FUND + allowance = data.ALLOWANCE + sub_allowance = data.SUB_ALLOWANCE + allotment = data.ALLOTMENT_ORG + appropriation = "-".join([data.APPROP_PREFIX or "", data.APPROP_YEAR[0:2] or "", data.APPROP_POSTFIX or ""]) + method_of_transfer = CANMethodOfTransfer[data.METHOD_OF_TRANSFER] + funding_source = CANFundingSource[data.FUNDING_SOURCE] + + existing_funding_details = session.execute(select(CANFundingDetails).where( + and_( + CANFundingDetails.fiscal_year == fiscal_year, + CANFundingDetails.fund_code == fund_code, + CANFundingDetails.allowance == allowance, + CANFundingDetails.sub_allowance == sub_allowance, + CANFundingDetails.allotment == allotment, + CANFundingDetails.appropriation == appropriation, + CANFundingDetails.method_of_transfer == 
method_of_transfer, + CANFundingDetails.funding_source == funding_source, + ))).scalar_one_or_none() + + if not existing_funding_details: + funding_details = CANFundingDetails( + fiscal_year=fiscal_year, + fund_code=fund_code, + allowance=allowance, + sub_allowance=sub_allowance, + allotment=allotment, + appropriation=appropriation, + method_of_transfer=method_of_transfer, + funding_source=funding_source, + created_by=sys_user.id, + ) + session.add(funding_details) + session.commit() + can.funding_details = funding_details + else: + can.funding_details = existing_funding_details + + session.merge(can) + session.commit() except Exception as e: logger.error(f"Error creating models for {data}") raise e - return models -def create_all_models(data: List[CANData], sys_user: User, portfolio_ref_data: List[Portfolio]) -> List[BaseModel]: +def create_all_models(data: List[CANData], sys_user: User, session: Session) -> None: """ Convert a list of CanData instances to a list of BaseModel instances. :param data: The list of CanData instances to convert. - :param portfolio_ref_data: A list of Portfolio instances to use as reference data. + :param sys_user: The system user to use. + :param session: The database session to use. :return: A list of BaseModel instances. """ - return [m for d in data for m in create_models(d, sys_user, portfolio_ref_data)] - -def persist_models(models: List[BaseModel], session) -> None: - """ - Persist a list of models to the database. + for d in data: + create_models(d, sys_user, session) - :param models: The list of models to persist. - :param session: The database session to use. 
- """ - for model in models: - obj = session.get(type(model), model.id) - - if obj: - session.merge(model) - else: - session.add(model) - session.commit() - logger.info(f"Persisted {len(models)} models.") - return None def create_all_can_data(data: List[dict]) -> List[CANData]: """ @@ -197,8 +208,50 @@ def transform(data: DictReader, portfolios: List[Portfolio], session: Session, s logger.info("Data validation passed.") - models = create_all_models(can_data, sys_user, portfolios) - logger.info(f"Created {len(models)} models.") + create_all_models(can_data, sys_user, session) + logger.info(f"Created models.") + - persist_models(models, session) - logger.info("Persisted models.") +def create_can_funding_details_model(data: CANData, sys_user: User, session: Session) -> BaseModel: + logger.debug(f"Creating model for {data}") + + try: + fiscal_year = int(data.FUND[6:10]) + fund_code = data.FUND + allowance = data.ALLOWANCE + sub_allowance = data.SUB_ALLOWANCE + allotment = data.ALLOTMENT_ORG + appropriation = "-".join([data.APPROP_PREFIX or "", data.APPROP_YEAR[0:2] or "", data.APPROP_POSTFIX or ""]) + method_of_transfer = CANMethodOfTransfer[data.METHOD_OF_TRANSFER] + funding_source = CANFundingSource[data.FUNDING_SOURCE] + + existing_funding_details = session.execute(select(CANFundingDetails).where( + and_( + CANFundingDetails.fiscal_year == fiscal_year, + CANFundingDetails.fund_code == fund_code, + CANFundingDetails.allowance == allowance, + CANFundingDetails.sub_allowance == sub_allowance, + CANFundingDetails.allotment == allotment, + CANFundingDetails.appropriation == appropriation, + CANFundingDetails.method_of_transfer == method_of_transfer, + CANFundingDetails.funding_source == funding_source, + ))).scalar_one_or_none() + + if not existing_funding_details: + funding_details = CANFundingDetails( + fiscal_year=fiscal_year, + fund_code=fund_code, + allowance=allowance, + sub_allowance=sub_allowance, + allotment=allotment, + appropriation=appropriation, + 
method_of_transfer=method_of_transfer, + funding_source=funding_source, + created_by=sys_user.id, + ) + return funding_details + else: + return existing_funding_details + except Exception as e: + logger.error(f"Error creating model for {data}") + raise e diff --git a/backend/data_tools/tests/common/test_utils.py b/backend/data_tools/tests/common/test_utils.py index 947f5b7900..73963463ab 100644 --- a/backend/data_tools/tests/common/test_utils.py +++ b/backend/data_tools/tests/common/test_utils.py @@ -1,10 +1,10 @@ import sqlalchemy from data_tools.environment.pytest import PytestConfig -from data_tools.src.common.utils import init_db +from data_tools.src.common.db import init_db_from_config def test_init_db(db_service): _, engine = db_service - engine, metadata_obj = init_db(PytestConfig(), engine) + engine, metadata_obj = init_db_from_config(PytestConfig(), engine) assert isinstance(engine, sqlalchemy.engine.Engine) assert isinstance(metadata_obj, sqlalchemy.MetaData) diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py index 579e3f3125..7f3d3c06a2 100644 --- a/backend/data_tools/tests/load_cans/test_load_cans.py +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ -6,21 +6,14 @@ from data_tools.src.common.utils import get_or_create_sys_user from data_tools.src.import_static_data.import_data import get_config from data_tools.src.load_cans.main import main -from data_tools.src.load_cans.utils import ( - CANData, - create_can_data, - create_models, - persist_models, - validate_all, - validate_data, -) +from data_tools.src.load_cans.utils import CANData, create_can_data, create_models, validate_all, validate_data from sqlalchemy import and_, text from models import * # noqa: F403, F401 @pytest.fixture() -def test_division(loaded_db): +def db_with_divisions(loaded_db): division = loaded_db.get(Division, 999) if not division: @@ -32,32 +25,40 @@ def test_division(loaded_db): 
loaded_db.merge(division) loaded_db.commit() - yield division + yield loaded_db @pytest.fixture() -def test_portfolio(loaded_db, test_division): - portfolio_1 = loaded_db.get(Portfolio, 1) - portfolio_2 = loaded_db.get(Portfolio, 2) - - if not portfolio_1 or not portfolio_2: - portfolio_1 = Portfolio( - id=1, - abbreviation="HMRF", - name="Healthy Marriages Responsible Fatherhood", - division_id=test_division.id, - ) +def db_with_portfolios(db_with_divisions): + portfolio_1 = Portfolio( + id=1, + abbreviation="HMRF", + name="Healthy Marriages Responsible Fatherhood", + division_id=999, + ) - portfolio_2 = Portfolio( - id=2, - abbreviation="CC", - name="Child Care", - division_id=test_division.id, - ) + portfolio_2 = Portfolio( + id=2, + abbreviation="CC", + name="Child Care", + division_id=999, + ) - loaded_db.add_all([portfolio_1, portfolio_2]) - loaded_db.commit() + db_with_divisions.add_all([portfolio_1, portfolio_2]) + db_with_divisions.commit() + + yield db_with_divisions - yield portfolio_1, portfolio_2 + db_with_divisions.execute(text("DELETE FROM portfolio")) + db_with_divisions.execute(text("DELETE FROM portfolio_version")) + db_with_divisions.commit() + + # Cleanup + db_with_divisions.execute(text("DELETE FROM can")) + db_with_divisions.execute(text("DELETE FROM can_funding_details")) + db_with_divisions.execute(text("DELETE FROM can_version")) + db_with_divisions.execute(text("DELETE FROM can_funding_details_version")) + db_with_divisions.execute(text("DELETE FROM ops_db_history")) + db_with_divisions.execute(text("DELETE FROM ops_db_history_version")) def test_get_config_default(): @@ -136,19 +137,7 @@ def test_create_models_no_can_nbr(): NICK_NAME="HMRF-OPRE", ) -def test_create_models(sys_user): - portfolios = [ - Portfolio( - abbreviation="HMRF", - name="Healthy Marriages Responsible Fatherhood", - ), - Portfolio( - abbreviation="CC", - name="Child Care", - ), - ] - - +def test_create_models(db_with_portfolios): data = CANData( SYS_CAN_ID=500, 
CAN_NBR="G99HRF2", @@ -167,31 +156,105 @@ def test_create_models(sys_user): NICK_NAME="HMRF-OPRE", ) - models = create_models(data, sys_user, portfolios) + sys_user = User( + email="system.admin@localhost", + ) + create_models(data, sys_user, db_with_portfolios) - assert len(models) == 2 + can_model = db_with_portfolios.get(CAN, 500) + can_funding_details = db_with_portfolios.execute(select(CANFundingDetails).where(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar_one_or_none() - can_model = next(m for m in models if isinstance(m, CAN)) assert can_model.id == 500 assert can_model.number == "G99HRF2" assert can_model.description == "Healthy Marriages Responsible Fatherhood - OPRE" assert can_model.nick_name == "HMRF-OPRE" - assert can_model.portfolio == next(p for p in portfolios if p.abbreviation == "HMRF") - assert can_model.funding_details == next(m for m in models if isinstance(m, CANFundingDetails)) - - funding_details_model = next(m for m in models if isinstance(m, CANFundingDetails)) - assert funding_details_model.fiscal_year == 2023 - assert funding_details_model.fund_code == "AAXXXX20231DAD" - assert funding_details_model.allowance == "0000000001" - assert funding_details_model.sub_allowance == "9KRZ2ND" - assert funding_details_model.allotment == "YZC6S1JUGUN" - assert funding_details_model.appropriation == "XX-23-XXXX" - assert funding_details_model.method_of_transfer == CANMethodOfTransfer.DIRECT - assert funding_details_model.funding_source == CANFundingSource.OPRE - assert funding_details_model.active_period == 1 - assert funding_details_model.obligate_by == 2024 - -def test_persist_models(loaded_db, sys_user, test_division, test_portfolio): + assert can_model.portfolio == db_with_portfolios.execute(select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none() + assert can_model.funding_details == can_funding_details + + assert can_funding_details.fiscal_year == 2023 + assert can_funding_details.fund_code == "AAXXXX20231DAD" + 
assert can_funding_details.allowance == "0000000001" + assert can_funding_details.sub_allowance == "9KRZ2ND" + assert can_funding_details.allotment == "YZC6S1JUGUN" + assert can_funding_details.appropriation == "XX-23-XXXX" + assert can_funding_details.method_of_transfer == CANMethodOfTransfer.DIRECT + assert can_funding_details.funding_source == CANFundingSource.OPRE + assert can_funding_details.active_period == 1 + assert can_funding_details.obligate_by == 2024 + + # Cleanup + db_with_portfolios.execute(text("DELETE FROM can")) + db_with_portfolios.execute(text("DELETE FROM can_funding_details")) + db_with_portfolios.execute(text("DELETE FROM can_version")) + db_with_portfolios.execute(text("DELETE FROM can_funding_details_version")) + db_with_portfolios.execute(text("DELETE FROM ops_db_history")) + db_with_portfolios.execute(text("DELETE FROM ops_db_history_version")) + + +def test_main(db_with_portfolios): + result = CliRunner().invoke( + main, + [ + "--env", + "pytest", + "--input-csv", + "test_csv/can_valid.tsv", + ], + ) + + assert result.exit_code == 0 + + # make sure the data was loaded + can_1 = db_with_portfolios.get(CAN, 500) + assert can_1.number == "G99HRF2" + assert can_1.description == "Healthy Marriages Responsible Fatherhood - OPRE" + assert can_1.nick_name == "HMRF-OPRE" + assert can_1.portfolio == db_with_portfolios.execute(select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none() + assert can_1.funding_details == db_with_portfolios.execute(select(CANFundingDetails).where(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar_one_or_none() + assert can_1.funding_details.fiscal_year == 2023 + assert can_1.funding_details.fund_code == "AAXXXX20231DAD" + assert can_1.funding_details.allowance == "0000000001" + assert can_1.funding_details.sub_allowance == "9KRZ2ND" + assert can_1.funding_details.allotment == "YZC6S1JUGUN" + assert can_1.funding_details.appropriation == "XX-23-XXXX" + assert 
can_1.funding_details.method_of_transfer == CANMethodOfTransfer.DIRECT + assert can_1.funding_details.funding_source == CANFundingSource.OPRE + assert can_1.funding_details.created_by == get_or_create_sys_user(db_with_portfolios).id + + can_2 = db_with_portfolios.get(CAN, 505) + assert can_2.number == "G994648" + assert can_2.description == "Kinship Navigation" + assert can_2.nick_name == "Kin-Nav" + assert can_2.portfolio == db_with_portfolios.execute(select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none() + assert can_2.funding_details == db_with_portfolios.execute(select(CANFundingDetails).where(CANFundingDetails.fund_code == "FFXXXX20215DAD")).scalar_one_or_none() + assert can_2.funding_details.fiscal_year == 2021 + assert can_2.funding_details.fund_code == "FFXXXX20215DAD" + assert can_2.funding_details.allowance == "0000000006" + assert can_2.funding_details.sub_allowance == "G4N2ZIV" + assert can_2.funding_details.allotment == "KCTQYEKJ4F6" + assert can_2.funding_details.appropriation == "XX-21-XXXX" + assert can_2.funding_details.method_of_transfer == CANMethodOfTransfer.IAA + assert can_2.funding_details.funding_source == CANFundingSource.OPRE + assert can_2.funding_details.created_by == get_or_create_sys_user(db_with_portfolios).id + + history_objs = db_with_portfolios.execute(select(OpsDBHistory).where(OpsDBHistory.class_name == "CAN")).scalars().all() + assert len(history_objs) == 13 + + can_1_history = db_with_portfolios.execute(select(OpsDBHistory).where(and_(OpsDBHistory.row_key == "500", OpsDBHistory.class_name == "CAN"))).scalars().all() + assert len(can_1_history) == 1 + + # Cleanup + db_with_portfolios.execute(text("DELETE FROM can")) + db_with_portfolios.execute(text("DELETE FROM can_funding_details")) + db_with_portfolios.execute(text("DELETE FROM can_version")) + db_with_portfolios.execute(text("DELETE FROM can_funding_details_version")) + db_with_portfolios.execute(text("DELETE FROM ops_db_history")) + 
db_with_portfolios.execute(text("DELETE FROM ops_db_history_version")) + + +def test_create_models_upsert(db_with_portfolios): + sys_user = get_or_create_sys_user(db_with_portfolios) + data_1 = CANData( SYS_CAN_ID=500, CAN_NBR="G99HRF2", @@ -211,7 +274,7 @@ def test_persist_models(loaded_db, sys_user, test_division, test_portfolio): ) data_2 = CANData( - SYS_CAN_ID=501, + SYS_CAN_ID=500, CAN_NBR="G99HRF3", CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", FUND="AAXXXX20231DAD", @@ -222,96 +285,88 @@ def test_persist_models(loaded_db, sys_user, test_division, test_portfolio): APPROP_PREFIX="XX", APPROP_POSTFIX="XXXX", APPROP_YEAR="23", - PORTFOLIO="CC", + PORTFOLIO="HMRF", FUNDING_SOURCE="OPRE", METHOD_OF_TRANSFER="DIRECT", NICK_NAME="HMRF-OPRE", ) - models = create_models(data_1, sys_user, list(test_portfolio)) + create_models(data_2, sys_user, list(test_portfolio)) + data_3 = CANData( + SYS_CAN_ID=500, + CAN_NBR="G99HRF3", + CAN_DESCRIPTION="Healthy Marriages Responsible Fatherhood - OPRE", + FUND="AAXXXX20231DAE", + ALLOWANCE="0000000001", + ALLOTMENT_ORG="YZC6S1JUGUN", + SUB_ALLOWANCE="9KRZ2ND", + CURRENT_FY_FUNDING_YTD=880000.0, + APPROP_PREFIX="XX", + APPROP_POSTFIX="XXXX", + APPROP_YEAR="23", + PORTFOLIO="HMRF", + FUNDING_SOURCE="OPRE", + METHOD_OF_TRANSFER="DIRECT", + NICK_NAME="HMRF-OPRE", + ) - persist_models(models, loaded_db) + create_models(data_1, sys_user, db_with_portfolios) - can_1 = loaded_db.get(CAN, 500) + # make sure the data was loaded + can_1 = db_with_portfolios.get(CAN, 500) assert can_1.number == "G99HRF2" assert can_1.description == "Healthy Marriages Responsible Fatherhood - OPRE" assert can_1.nick_name == "HMRF-OPRE" - assert can_1.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() - assert can_1.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() + assert can_1.portfolio == 
db_with_portfolios.execute(select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none() + assert can_1.funding_details.id == db_with_portfolios.execute( + select(CANFundingDetails).where(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar_one_or_none().id assert can_1.created_by == sys_user.id # make sure the version records were created assert can_1.versions[0].number == "G99HRF2" assert can_1.versions[0].description == "Healthy Marriages Responsible Fatherhood - OPRE" assert can_1.versions[0].nick_name == "HMRF-OPRE" - assert can_1.versions[0].portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar().versions[0] - assert can_1.versions[0].funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar().versions[0] + assert can_1.versions[0].portfolio == db_with_portfolios.execute(select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none().versions[0] + assert can_1.versions[0].funding_details == db_with_portfolios.execute(select(CANFundingDetails).where(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar_one_or_none().versions[0] assert can_1.versions[0].created_by == sys_user.id # make sure the history records are created - history_records = loaded_db.execute(select(OpsDBHistory).filter(OpsDBHistory.class_name == "CAN").order_by(OpsDBHistory.created_on.desc())).scalars().all() - assert len(history_records) == 2 - assert history_records[0].event_type == OpsDBHistoryType.NEW - assert history_records[0].row_key == "500" - assert history_records[1].event_type == OpsDBHistoryType.NEW - assert history_records[1].row_key == "501" - assert history_records[0].created_by == sys_user.id - assert history_records[1].created_by == sys_user.id - - # Cleanup - for model in models: - loaded_db.delete(model) - loaded_db.commit() - - -def test_main(loaded_db, mocker, test_division, test_portfolio): - result = CliRunner().invoke( 
- main, - [ - "--env", - "pytest", - "--input-csv", - "test_csv/can_valid.tsv", - ], - ) - - assert result.exit_code == 0 - - # make sure the data was loaded - can_1 = loaded_db.get(CAN, 500) - assert can_1.number == "G99HRF2" + history_record = db_with_portfolios.execute(select(OpsDBHistory).where(OpsDBHistory.class_name == "CAN").order_by(OpsDBHistory.created_on.desc())).scalar() + assert history_record is not None + assert history_record.event_type == OpsDBHistoryType.NEW + assert history_record.row_key == "500" + assert history_record.created_by == sys_user.id + + # upsert the same data - change the CAN number + create_models(data_2, sys_user, db_with_portfolios) + can_1 = db_with_portfolios.get(CAN, 500) + assert can_1.number == "G99HRF3" assert can_1.description == "Healthy Marriages Responsible Fatherhood - OPRE" assert can_1.nick_name == "HMRF-OPRE" - assert can_1.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() - assert can_1.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "AAXXXX20231DAD")).scalar() - assert can_1.funding_details.fiscal_year == 2023 - assert can_1.funding_details.fund_code == "AAXXXX20231DAD" - assert can_1.funding_details.allowance == "0000000001" - assert can_1.funding_details.sub_allowance == "9KRZ2ND" - assert can_1.funding_details.allotment == "YZC6S1JUGUN" - assert can_1.funding_details.appropriation == "XX-23-XXXX" - assert can_1.funding_details.method_of_transfer == CANMethodOfTransfer.DIRECT - assert can_1.funding_details.funding_source == CANFundingSource.OPRE - assert can_1.funding_details.created_by == get_or_create_sys_user(loaded_db).id + assert can_1.portfolio == db_with_portfolios.execute( + select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none() + assert can_1.funding_details.id == db_with_portfolios.execute( + select(CANFundingDetails).where(CANFundingDetails.fund_code == 
"AAXXXX20231DAD")).scalar_one_or_none().id + assert can_1.created_by == sys_user.id - can_2 = loaded_db.get(CAN, 505) - assert can_2.number == "G994648" - assert can_2.description == "Kinship Navigation" - assert can_2.nick_name == "Kin-Nav" - assert can_2.portfolio == loaded_db.execute(select(Portfolio).filter(Portfolio.abbreviation == "HMRF")).scalar() - assert can_2.funding_details == loaded_db.execute(select(CANFundingDetails).filter(CANFundingDetails.fund_code == "FFXXXX20215DAD")).scalar() - assert can_2.funding_details.fiscal_year == 2021 - assert can_2.funding_details.fund_code == "FFXXXX20215DAD" - assert can_2.funding_details.allowance == "0000000006" - assert can_2.funding_details.sub_allowance == "G4N2ZIV" - assert can_2.funding_details.allotment == "KCTQYEKJ4F6" - assert can_2.funding_details.appropriation == "XX-21-XXXX" - assert can_2.funding_details.method_of_transfer == CANMethodOfTransfer.IAA - assert can_2.funding_details.funding_source == CANFundingSource.OPRE - assert can_2.funding_details.created_by == get_or_create_sys_user(loaded_db).id + # upsert the same data - change the fund code + create_models(data_3, sys_user, db_with_portfolios) + can_1 = db_with_portfolios.get(CAN, 500) + assert can_1.number == "G99HRF3" + assert can_1.description == "Healthy Marriages Responsible Fatherhood - OPRE" + assert can_1.nick_name == "HMRF-OPRE" + assert can_1.portfolio == db_with_portfolios.execute( + select(Portfolio).where(Portfolio.abbreviation == "HMRF")).scalar_one_or_none() + assert can_1.funding_details.id == db_with_portfolios.execute( + select(CANFundingDetails).where(CANFundingDetails.fund_code == "AAXXXX20231DAE")).scalar_one_or_none().id + assert can_1.created_by == sys_user.id - history_objs = loaded_db.execute(select(OpsDBHistory).filter(OpsDBHistory.class_name == "CAN")).scalars().all() - assert len(history_objs) == 13 + assert len(db_with_portfolios.execute(select(CAN)).scalars().all()) == 1 + assert 
len(db_with_portfolios.execute(select(CANFundingDetails)).scalars().all()) == 2 - can_1_history = loaded_db.execute(select(OpsDBHistory).where(and_(OpsDBHistory.row_key == "500", OpsDBHistory.class_name == "CAN"))).scalars().all() - assert len(can_1_history) == 1 + # Cleanup + db_with_portfolios.execute(text("DELETE FROM can")) + db_with_portfolios.execute(text("DELETE FROM can_funding_details")) + db_with_portfolios.execute(text("DELETE FROM can_version")) + db_with_portfolios.execute(text("DELETE FROM can_funding_details_version")) + db_with_portfolios.execute(text("DELETE FROM ops_db_history")) + db_with_portfolios.execute(text("DELETE FROM ops_db_history_version")) From fc04ee4623d6515de24f81269c1c6e2245ab5fe6 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 16:04:26 -0400 Subject: [PATCH 30/34] add comment and remove dead code Signed-off-by: John DeAngelis --- backend/data_tools/src/load_cans/utils.py | 54 ++++------------------- 1 file changed, 8 insertions(+), 46 deletions(-) diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py index 288dce46ac..04a26217ef 100644 --- a/backend/data_tools/src/load_cans/utils.py +++ b/backend/data_tools/src/load_cans/utils.py @@ -88,7 +88,14 @@ def validate_all(data: List[CANData]) -> bool: def create_models(data: CANData, sys_user: User, session: Session) -> None: """ - Convert a CanData instance to a BaseModel instance. + Create and persist the CAN and CANFundingDetails models. + + The CANData does not contain a SYS_ID for the CANFundDetails model. + A check is made to see if the funding details already exist in the database by comparing the fields. + If the funding details do not exist, a new instance is created and persisted to the database. + If it does exist, the existing instance is associated with the CAN model. + This means that the funding details are not duplicated in the database but is also not updated if the data changes. 
+ The CAN model is upserted, however. :param data: The CanData instance to convert. :param sys_user: The system user to use. @@ -210,48 +217,3 @@ def transform(data: DictReader, portfolios: List[Portfolio], session: Session, s create_all_models(can_data, sys_user, session) logger.info(f"Created models.") - - -def create_can_funding_details_model(data: CANData, sys_user: User, session: Session) -> BaseModel: - logger.debug(f"Creating model for {data}") - - try: - fiscal_year = int(data.FUND[6:10]) - fund_code = data.FUND - allowance = data.ALLOWANCE - sub_allowance = data.SUB_ALLOWANCE - allotment = data.ALLOTMENT_ORG - appropriation = "-".join([data.APPROP_PREFIX or "", data.APPROP_YEAR[0:2] or "", data.APPROP_POSTFIX or ""]) - method_of_transfer = CANMethodOfTransfer[data.METHOD_OF_TRANSFER] - funding_source = CANFundingSource[data.FUNDING_SOURCE] - - existing_funding_details = session.execute(select(CANFundingDetails).where( - and_( - CANFundingDetails.fiscal_year == fiscal_year, - CANFundingDetails.fund_code == fund_code, - CANFundingDetails.allowance == allowance, - CANFundingDetails.sub_allowance == sub_allowance, - CANFundingDetails.allotment == allotment, - CANFundingDetails.appropriation == appropriation, - CANFundingDetails.method_of_transfer == method_of_transfer, - CANFundingDetails.funding_source == funding_source, - ))).scalar_one_or_none() - - if not existing_funding_details: - funding_details = CANFundingDetails( - fiscal_year=fiscal_year, - fund_code=fund_code, - allowance=allowance, - sub_allowance=sub_allowance, - allotment=allotment, - appropriation=appropriation, - method_of_transfer=method_of_transfer, - funding_source=funding_source, - created_by=sys_user.id, - ) - return funding_details - else: - return existing_funding_details - except Exception as e: - logger.error(f"Error creating model for {data}") - raise e From 242e31b38765276458e157b4e777ac6df3464c08 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 16:27:25 -0400 
Subject: [PATCH 31/34] update to use init_db_from_config Signed-off-by: John DeAngelis --- backend/data_tools/src/disable_users/disable_users.py | 3 ++- backend/data_tools/src/import_static_data/import_data.py | 4 ++-- backend/data_tools/src/load_cans/utils.py | 2 +- backend/models/base.py | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/backend/data_tools/src/disable_users/disable_users.py b/backend/data_tools/src/disable_users/disable_users.py index 71a239fcd1..c0f7018724 100644 --- a/backend/data_tools/src/disable_users/disable_users.py +++ b/backend/data_tools/src/disable_users/disable_users.py @@ -2,6 +2,7 @@ import sys import time +from data_tools.src.common.db import init_db_from_config from data_tools.src.common.utils import get_or_create_sys_user from data_tools.src.disable_users.queries import ( ALL_ACTIVE_USER_SESSIONS_QUERY, @@ -97,7 +98,7 @@ def update_disabled_users_status(conn: sqlalchemy.engine.Engine): script_env = os.getenv("ENV") script_config = get_config(script_env) - db_engine, db_metadata_obj = init_db(script_config) + db_engine, db_metadata_obj = init_db_from_config(script_config) event.listen(Mapper, "after_configured", setup_schema(BaseModel)) diff --git a/backend/data_tools/src/import_static_data/import_data.py b/backend/data_tools/src/import_static_data/import_data.py index 1a6ca15ab2..2013d9e01a 100644 --- a/backend/data_tools/src/import_static_data/import_data.py +++ b/backend/data_tools/src/import_static_data/import_data.py @@ -2,7 +2,7 @@ import os import json5 -from data_tools.src.common.db import init_db +from data_tools.src.common.db import init_db, init_db_from_config from data_tools.src.common.utils import get_config from sqlalchemy import text from sqlalchemy.engine import Connection, Engine @@ -158,7 +158,7 @@ def import_data(engine: Engine, data: dict[str, Any]) -> None: script_env = os.getenv("ENV") script_config = get_config(script_env) - db_engine, db_metadata_obj = init_db(script_config) + db_engine, 
db_metadata_obj = init_db_from_config(script_config) global_data = get_data_to_import() diff --git a/backend/data_tools/src/load_cans/utils.py b/backend/data_tools/src/load_cans/utils.py index 04a26217ef..b502cafc10 100644 --- a/backend/data_tools/src/load_cans/utils.py +++ b/backend/data_tools/src/load_cans/utils.py @@ -216,4 +216,4 @@ def transform(data: DictReader, portfolios: List[Portfolio], session: Session, s logger.info("Data validation passed.") create_all_models(can_data, sys_user, session) - logger.info(f"Created models.") + logger.info(f"Finished loading models.") diff --git a/backend/models/base.py b/backend/models/base.py index ccf6568052..f9fc4c0748 100644 --- a/backend/models/base.py +++ b/backend/models/base.py @@ -152,7 +152,7 @@ def to_dict(self): else None ) except marshmallow.exceptions.RegistryError: - logger.error("SafeUserSchema not found in marshmallow class registry") + logger.debug("SafeUserSchema not found in marshmallow class registry") return data From 4c321401fa0f6f261d603291c0ebffa329151520 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 17:08:57 -0400 Subject: [PATCH 32/34] fix bug in disable user query; add event_details to disable user event Signed-off-by: John DeAngelis --- .../src/disable_users/disable_users.py | 19 +++++++++----- .../data_tools/src/disable_users/queries.py | 26 +++++++++++-------- .../tests/disable_users/test_disable_users.py | 3 ++- 3 files changed, 30 insertions(+), 18 deletions(-) diff --git a/backend/data_tools/src/disable_users/disable_users.py b/backend/data_tools/src/disable_users/disable_users.py index c0f7018724..cee62c8411 100644 --- a/backend/data_tools/src/disable_users/disable_users.py +++ b/backend/data_tools/src/disable_users/disable_users.py @@ -1,14 +1,15 @@ import os import sys import time +from datetime import timedelta -from data_tools.src.common.db import init_db_from_config +from data_tools.src.common.db import init_db_from_config, setup_triggers from 
data_tools.src.common.utils import get_or_create_sys_user from data_tools.src.disable_users.queries import ( ALL_ACTIVE_USER_SESSIONS_QUERY, EXCLUDED_USER_OIDC_IDS, GET_USER_ID_BY_OIDC_QUERY, - INACTIVE_USER_QUERY, + get_latest_user_session, ) from data_tools.src.import_static_data.import_data import get_config, init_db from sqlalchemy import text @@ -53,6 +54,7 @@ def disable_user(se, user_id, system_admin_id): event_type=OpsEventType.UPDATE_USER, event_status=OpsEventStatus.SUCCESS, created_by=system_admin_id, + event_details={"user_id": user_id, "message": "User deactivated via automated process."}, ) se.add(ops_event) @@ -72,11 +74,18 @@ def update_disabled_users_status(conn: sqlalchemy.engine.Engine): with Session(conn) as se: logger.info("Checking for System User.") system_admin = get_or_create_sys_user(se) - system_admin_id = system_admin.id + setup_triggers(se, system_admin) + logger.info("Fetching inactive users.") - results = se.execute(text(INACTIVE_USER_QUERY)).scalars().all() + results = [] + all_users = se.execute(select(User)).scalars().all() + for user in all_users: + latest_session = get_latest_user_session(user_id=user.id, session=se) + if latest_session and latest_session.last_active_at < datetime.now() - timedelta(days=60): + results.append(user.id) + excluded_ids = get_ids_from_oidc_ids(se, EXCLUDED_USER_OIDC_IDS) user_ids = [uid for uid in results if uid not in excluded_ids] @@ -100,8 +109,6 @@ def update_disabled_users_status(conn: sqlalchemy.engine.Engine): script_config = get_config(script_env) db_engine, db_metadata_obj = init_db_from_config(script_config) - event.listen(Mapper, "after_configured", setup_schema(BaseModel)) - update_disabled_users_status(db_engine) logger.info("Disable Inactive Users process complete.") diff --git a/backend/data_tools/src/disable_users/queries.py b/backend/data_tools/src/disable_users/queries.py index c3f280a3b2..be465bd172 100644 --- a/backend/data_tools/src/disable_users/queries.py +++ 
b/backend/data_tools/src/disable_users/queries.py @@ -1,4 +1,8 @@ from data_tools.src.common.utils import SYSTEM_ADMIN_OIDC_ID +from sqlalchemy import select +from sqlalchemy.orm import Session + +from models import UserSession EXCLUDED_USER_OIDC_IDS = [ "00000000-0000-1111-a111-000000000018", # Admin Demo @@ -9,17 +13,6 @@ SYSTEM_ADMIN_OIDC_ID # System Admin ] -INACTIVE_USER_QUERY = ( - "SELECT id " - "FROM ops_user " - "WHERE id IN ( " - " SELECT ou.id " - " FROM user_session JOIN ops_user ou ON user_session.user_id = ou.id " - " WHERE ou.status = 'ACTIVE' " - " AND user_session.last_active_at < CURRENT_TIMESTAMP - INTERVAL '60 days'" - ");" -) - ALL_ACTIVE_USER_SESSIONS_QUERY = ( "SELECT * " "FROM user_session " @@ -28,3 +21,14 @@ ) GET_USER_ID_BY_OIDC_QUERY = "SELECT id FROM ops_user WHERE oidc_id = :oidc_id" + +def get_latest_user_session(user_id: int, session: Session) -> UserSession | None: + return ( + session.execute( + select(UserSession) + .where(UserSession.user_id == user_id) # type: ignore + .order_by(UserSession.created_on.desc()) + ) + .scalars() + .first() + ) diff --git a/backend/data_tools/tests/disable_users/test_disable_users.py b/backend/data_tools/tests/disable_users/test_disable_users.py index 073331d239..c66569a036 100644 --- a/backend/data_tools/tests/disable_users/test_disable_users.py +++ b/backend/data_tools/tests/disable_users/test_disable_users.py @@ -47,8 +47,9 @@ def test_deactivate_user(mock_session): @patch("data_tools.src.disable_users.disable_users.logger") def test_no_inactive_users(mock_logger, mock_session, mocker): mocker.patch("data_tools.src.disable_users.disable_users.get_or_create_sys_user", return_value=User(id=system_admin_id)) + mocker.patch("data_tools.src.disable_users.disable_users.get_latest_user_session", return_value=[]) + mocker.patch("data_tools.src.disable_users.disable_users.setup_triggers") - mock_session.execute.return_value.all.return_value = None update_disabled_users_status(mock_session) 
mock_logger.info.assert_any_call("Checking for System User.") From 682c80e03d19ad4150a55dc92275b0fe861bdeb0 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 17:13:27 -0400 Subject: [PATCH 33/34] remove commented code Signed-off-by: John DeAngelis --- backend/data_tools/src/common/utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/data_tools/src/common/utils.py b/backend/data_tools/src/common/utils.py index e4e9988deb..6aa0676cfd 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -42,7 +42,5 @@ def get_or_create_sys_user(session: Session) -> User: if not user: user = User(email=SYSTEM_ADMIN_EMAIL, oidc_id=UUID(SYSTEM_ADMIN_OIDC_ID)) - # session.add(user) - # session.commit() return user From c0fd2d225eb50485ea69b33e77a0a66475898c23 Mon Sep 17 00:00:00 2001 From: John DeAngelis Date: Wed, 23 Oct 2024 21:24:13 -0400 Subject: [PATCH 34/34] fix BE unit tests Signed-off-by: John DeAngelis --- backend/data_tools/environment/pytest.py | 2 +- .../environment/pytest_data_tools.py | 33 +++++++++++++++++++ backend/data_tools/src/common/utils.py | 3 ++ .../tests/load_cans/test_load_cans.py | 2 +- backend/ops_api/tests/__inti__.py | 0 5 files changed, 38 insertions(+), 2 deletions(-) create mode 100644 backend/data_tools/environment/pytest_data_tools.py delete mode 100644 backend/ops_api/tests/__inti__.py diff --git a/backend/data_tools/environment/pytest.py b/backend/data_tools/environment/pytest.py index 5069d07a1b..c309dca67d 100644 --- a/backend/data_tools/environment/pytest.py +++ b/backend/data_tools/environment/pytest.py @@ -5,7 +5,7 @@ class PytestConfig(DataToolsConfig): @property def db_connection_string(self) -> str: return ( - "postgresql://postgres:local_password@localhost:54321/postgres" # pragma: allowlist secret + "postgresql://ops:ops@unittest_db:5432/postgres" # pragma: allowlist secret ) @property diff --git a/backend/data_tools/environment/pytest_data_tools.py 
b/backend/data_tools/environment/pytest_data_tools.py new file mode 100644 index 0000000000..8f2b90c1cd --- /dev/null +++ b/backend/data_tools/environment/pytest_data_tools.py @@ -0,0 +1,33 @@ +from data_tools.environment.types import DataToolsConfig + + +class PytestDataToolsConfig(DataToolsConfig): + @property + def db_connection_string(self) -> str: + return ( + "postgresql://postgres:local_password@localhost:54321/postgres" # pragma: allowlist secret + ) + + @property + def verbosity(self) -> bool: + return True + + @property + def is_remote(self) -> bool: + return False + + @property + def file_system_path(self) -> str: + return "." + + @property + def vault_url(self) -> str | None: + return None + + @property + def vault_file_storage_key(self) -> str | None: + return None + + @property + def file_storage_auth_method(self) -> str | None: + return None diff --git a/backend/data_tools/src/common/utils.py b/backend/data_tools/src/common/utils.py index 6aa0676cfd..d149e1a8e5 100644 --- a/backend/data_tools/src/common/utils.py +++ b/backend/data_tools/src/common/utils.py @@ -5,6 +5,7 @@ from data_tools.environment.dev import DevConfig from data_tools.environment.local import LocalConfig from data_tools.environment.pytest import PytestConfig +from data_tools.environment.pytest_data_tools import PytestDataToolsConfig from data_tools.environment.types import DataToolsConfig from sqlalchemy import select from sqlalchemy.orm import Session @@ -25,6 +26,8 @@ def get_config(environment_name: Optional[str] = None) -> DataToolsConfig: config = LocalConfig() case "pytest": config = PytestConfig() + case "pytest_data_tools": + config = PytestDataToolsConfig() case _: config = DevConfig() return config diff --git a/backend/data_tools/tests/load_cans/test_load_cans.py b/backend/data_tools/tests/load_cans/test_load_cans.py index 7f3d3c06a2..af8acb57b4 100644 --- a/backend/data_tools/tests/load_cans/test_load_cans.py +++ b/backend/data_tools/tests/load_cans/test_load_cans.py @@ 
-196,7 +196,7 @@ def test_main(db_with_portfolios): main, [ "--env", - "pytest", + "pytest_data_tools", "--input-csv", "test_csv/can_valid.tsv", ], diff --git a/backend/ops_api/tests/__inti__.py b/backend/ops_api/tests/__inti__.py deleted file mode 100644 index e69de29bb2..0000000000