From 99718698dc22610c847cceef5cc19b3a3cfc4c26 Mon Sep 17 00:00:00 2001 From: leodube-aot <122323255+leodube-aot@users.noreply.github.com> Date: Thu, 1 Feb 2024 15:42:26 -0800 Subject: [PATCH] 19373 Update CI for queue services (#2424) * Add workflow_dispatch to ci jobs * Update queue-services CI jobs * Update queue-services pyproject.toml files * Update queue-services poetry.lock files * Run black formatter on queue-services * Fix pylint errors on queue-services * Update CI yaml files * Run isort on queue-services * Run black again on entity-auth * Run black again on entity-filer * Fix pylint issues on entity_auth * Fix pylint issues on entity_bn src * Fix pylint issues on entity_emailer src * Fix pylint issues on entity_pay src * Fix remaining lint src issues (except for filer) * Fix remaining flake8 src issues (except for filer) * Update yaml and toml files to use Backend CI * Run isort again * Fix new flake8 issues * Add empty init files * Run black on init files * Fix linting issues entity_filer * Update noqa comment placement in filer * Add pytest coverage and configuration * Update pylint coverage config * Update coverage.run config --- .github/workflows/business-auth-ci.yml | 117 +--- .github/workflows/business-bn-ci.yml | 106 +-- .github/workflows/business-emailer-ci.yml | 122 +--- .github/workflows/business-filer-ci.yml | 113 +--- .github/workflows/business-pay-ci.yml | 108 +-- queue_services/entity-auth/__init__.py | 15 + queue_services/entity-auth/poetry.lock | 448 ++++++++++++- queue_services/entity-auth/pyproject.toml | 125 +++- .../entity-auth/src/entity_auth/__init__.py | 5 +- .../entity-auth/src/entity_auth/config.py | 15 +- .../src/entity_auth/resources/__init__.py | 1 + .../src/entity_auth/resources/worker.py | 36 +- .../src/entity_auth/services/__init__.py | 1 - .../src/entity_auth/services/bootstrap.py | 20 +- .../src/entity_auth/services/gcp_queue.py | 59 +- .../src/entity_auth/services/logging.py | 19 +- .../src/entity_auth/services/name_request.py | 11 +- queue_services/entity-auth/tests/__init__.py | 1 - queue_services/entity-auth/tests/conftest.py | 50 +- .../entity-auth/tests/unit/__init__.py | 46 +- .../entity-auth/tests/unit/test_version.py | 4 +- .../entity-auth/tests/unit/test_worker.py | 26 +- queue_services/entity-bn/__init__.py | 15 + queue_services/entity-bn/poetry.lock | 465 ++++++++++++- queue_services/entity-bn/pyproject.toml | 125 +++- .../entity-bn/src/entity_bn/__init__.py | 3 +- .../src/entity_bn/bn_processors/__init__.py | 22 +- .../bn_processors/change_of_registration.py | 72 +- .../src/entity_bn/bn_processors/correction.py | 18 +- .../dissolution_or_put_back_on.py | 21 +- .../entity_bn/bn_processors/registration.py | 60 +- .../entity-bn/src/entity_bn/config.py | 18 +- .../src/entity_bn/resources/__init__.py | 1 + .../src/entity_bn/resources/worker.py | 17 +- .../src/entity_bn/services/__init__.py | 1 - .../src/entity_bn/services/gcp_queue.py | 59 +- .../src/entity_bn/services/logging.py | 19 +- queue_services/entity-bn/tests/__init__.py | 1 - queue_services/entity-bn/tests/conftest.py | 27 +- .../entity-bn/tests/pytest_marks.py | 3 +- .../entity-bn/tests/unit/__init__.py | 20 +- .../tests/unit/bn_processors/test_admin.py | 15 +- .../test_change_of_registration.py | 39 +- .../unit/bn_processors/test_correction.py | 55 +- .../test_dissolution_or_put_back_on.py | 33 +- .../unit/bn_processors/test_registration.py | 17 +- .../entity-bn/tests/unit/test_version.py | 1 - queue_services/entity-emailer/__init__.py | 15 + 
queue_services/entity-emailer/poetry.lock | 450 ++++++++++++- queue_services/entity-emailer/pyproject.toml | 125 +++- .../src/entity_emailer/__init__.py | 3 +- .../src/entity_emailer/config.py | 19 +- .../email_processors/__init__.py | 45 +- .../affiliation_notification.py | 21 +- .../agm_extension_notification.py | 50 +- .../agm_location_change_notification.py | 49 +- .../ar_reminder_notification.py | 16 +- .../email_processors/bn_notification.py | 27 +- .../change_of_registration_notification.py | 65 +- .../consent_continuation_out_notification.py | 51 +- .../continuation_out_notification.py | 47 +- .../correction_notification.py | 76 +-- .../dissolution_notification.py | 55 +- .../email_processors/filing_notification.py | 85 +-- .../email_processors/mras_notification.py | 17 +- .../email_processors/name_request.py | 25 +- .../email_processors/nr_notification.py | 15 +- .../registration_notification.py | 51 +- .../restoration_notification.py | 47 +- .../special_resolution_helper.py | 31 +- .../special_resolution_notification.py | 44 +- .../src/entity_emailer/resources/__init__.py | 1 + .../src/entity_emailer/resources/worker.py | 61 +- .../src/entity_emailer/services/__init__.py | 1 - .../src/entity_emailer/services/gcp_queue.py | 59 +- .../src/entity_emailer/services/logging.py | 19 +- .../entity-emailer/tests/__init__.py | 1 - .../entity-emailer/tests/conftest.py | 33 +- .../entity-emailer/tests/pytest_marks.py | 6 +- .../entity-emailer/tests/unit/__init__.py | 509 ++++++-------- .../test_affiliation_notification.py | 30 +- .../test_agm_extension_notification.py | 33 +- .../test_agm_location_change_notification.py | 36 +- .../test_ar_reminder_notification.py | 16 +- .../email_processors/test_bn_notification.py | 45 +- ...est_change_of_registration_notification.py | 68 +- ...t_consent_continuation_out_notification.py | 56 +- .../test_continuation_out_notification.py | 54 +- .../test_correction_notification.py | 303 +++++---- ...test_cp_special_resolution_notification.py | 181 ++--- .../test_dissolution_notification.py | 137 ++-- .../test_filing_notification.py | 127 ++-- .../test_mras_notification.py | 13 +- .../email_processors/test_nr_notification.py | 121 ++-- .../test_registration_notification.py | 49 +- .../test_restoration_notification.py | 164 +++-- .../tests/unit/services/test_gcp_queue.py | 9 +- .../tests/unit/test_configuration.py | 17 +- .../entity-emailer/tests/unit/test_version.py | 1 + .../entity-emailer/tests/unit/test_worker.py | 622 +++++++++--------- queue_services/entity-filer/__init__.py | 15 + queue_services/entity-filer/poetry.lock | 569 ++++++++++++---- queue_services/entity-filer/pyproject.toml | 124 +++- .../entity-filer/src/entity_filer/__init__.py | 9 +- .../src/entity_filer/common/enum.py | 5 +- .../entity-filer/src/entity_filer/config.py | 7 +- .../src/entity_filer/exceptions/__init__.py | 1 - .../exceptions/error_messages/__init__.py | 1 - .../exceptions/error_messages/codes.py | 3 +- .../exceptions/error_messages/messages.py | 1 - .../src/entity_filer/filing_meta.py | 5 +- .../filing_processors/admin_freeze.py | 9 +- .../filing_processors/agm_extension.py | 22 +- .../filing_processors/agm_location_change.py | 2 +- .../filing_processors/alteration.py | 10 +- .../amalgamation_application.py | 44 +- .../filing_processors/annual_report.py | 12 +- .../filing_processors/change_of_address.py | 5 +- .../filing_processors/change_of_directors.py | 61 +- .../filing_processors/change_of_name.py | 2 +- .../change_of_registration.py | 33 +- 
.../consent_continuation_out.py | 13 +- .../filing_processors/continuation_out.py | 19 +- .../filing_processors/conversion.py | 61 +- .../filing_processors/correction.py | 16 +- .../filing_processors/court_order.py | 10 +- .../filing_processors/dissolution.py | 38 +- .../filing_components/__init__.py | 40 +- .../filing_components/aliases.py | 10 +- .../filing_components/alternate_name.py | 56 +- .../filing_components/correction.py | 64 +- .../filing_components/filings.py | 15 +- .../filing_components/legal_entity_info.py | 20 +- .../filing_components/name_request.py | 16 +- .../filing_components/parties.py | 83 +-- .../filing_components/resolutions.py | 2 +- .../filing_components/rules_and_memorandum.py | 5 +- .../filing_components/shares.py | 26 +- .../filing_processors/incorporation_filing.py | 46 +- .../filing_processors/put_back_on.py | 9 +- .../filing_processors/registrars_notation.py | 12 +- .../filing_processors/registrars_order.py | 12 +- .../filing_processors/registration.py | 100 +-- .../filing_processors/restoration.py | 40 +- .../filing_processors/special_resolution.py | 17 +- .../filing_processors/transition.py | 20 +- .../src/entity_filer/resources/worker.py | 125 +--- .../src/entity_filer/services/__init__.py | 1 - .../src/entity_filer/services/gcp_queue.py | 50 +- .../src/entity_filer/services/logging.py | 5 +- .../src/entity_filer/translations/__init__.py | 5 +- .../src/entity_filer/utils/datetime.py | 12 +- .../utils/legislation_datetime.py | 28 +- .../src/entity_filer/utils/utils.py | 11 +- queue_services/entity-filer/tests/__init__.py | 5 +- queue_services/entity-filer/tests/conftest.py | 18 +- .../entity-filer/tests/unit/__init__.py | 25 +- .../tests/unit/experiment/test_versioning.py | 1 - .../filing_components/test_offices.py | 13 +- .../test_parties_entity_roles.py | 48 +- .../filing_components/test_shares.py | 12 +- .../filing_components/utils.py | 12 +- .../filing_processors/test_admin_freeze.py | 7 +- .../test_agm_location_change.py | 5 +- .../unit/filing_processors/test_aliases.py | 40 +- .../unit/filing_processors/test_alteration.py | 38 +- .../filing_processors/test_annual_report.py | 21 +- .../test_continuation_out.py | 58 +- .../unit/filing_processors/test_conversion.py | 53 +- .../filing_processors/test_court_order.py | 13 +- .../filing_processors/test_dissolution.py | 42 +- .../test_incorporation_filing.py | 137 +--- .../filing_processors/test_put_back_on.py | 6 +- .../test_registrars_notation.py | 18 +- .../test_registrars_order.py | 18 +- .../filing_processors/test_registration.py | 41 +- .../test_special_resolution.py | 14 +- .../unit/filing_processors/test_transition.py | 12 +- .../tests/unit/worker/test_agm_extension.py | 10 +- .../worker/test_amalgamation_application.py | 22 +- .../worker/test_change_of_registration.py | 187 ++---- .../worker/test_consent_continuation_out.py | 42 +- .../tests/unit/worker/test_conversion.py | 51 +- .../tests/unit/worker/test_correction_bcia.py | 206 ++---- .../test_correction_special_resolution.py | 45 +- .../tests/unit/worker/test_incorporation.py | 43 +- .../tests/unit/worker/test_resource.py | 6 +- .../tests/unit/worker/test_restoration.py | 72 +- .../unit/worker/test_special_resolution.py | 5 +- .../unit/worker/test_technical_correction.py | 15 +- .../tests/unit/worker/test_transition.py | 19 +- queue_services/entity-pay/__init__.py | 15 + queue_services/entity-pay/poetry.lock | 450 ++++++++++++- queue_services/entity-pay/pyproject.toml | 125 +++- .../entity-pay/src/entity_pay/__init__.py | 3 +- 
.../entity-pay/src/entity_pay/config.py | 15 +- .../src/entity_pay/resources/__init__.py | 1 + .../src/entity_pay/resources/worker.py | 35 +- .../src/entity_pay/services/__init__.py | 1 - .../src/entity_pay/services/gcp_queue.py | 64 +- .../src/entity_pay/services/logging.py | 19 +- queue_services/entity-pay/tests/__init__.py | 1 - queue_services/entity-pay/tests/conftest.py | 19 +- .../entity-pay/tests/unit/__init__.py | 2 +- .../tests/unit/services/test_gcp_queue.py | 9 +- .../entity-pay/tests/unit/test_worker.py | 22 +- 206 files changed, 5609 insertions(+), 5255 deletions(-) create mode 100644 queue_services/entity-auth/__init__.py create mode 100644 queue_services/entity-bn/__init__.py create mode 100644 queue_services/entity-emailer/__init__.py create mode 100644 queue_services/entity-filer/__init__.py create mode 100644 queue_services/entity-pay/__init__.py diff --git a/.github/workflows/business-auth-ci.yml b/.github/workflows/business-auth-ci.yml index b7073570af..ce5112b715 100644 --- a/.github/workflows/business-auth-ci.yml +++ b/.github/workflows/business-auth-ci.yml @@ -6,6 +6,7 @@ on: paths: - "queue_services/entity-auth/**" - "queue_services/common/**" + workflow_dispatch: defaults: run: @@ -13,113 +14,9 @@ defaults: working-directory: ./queue_services/entity-auth jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/lear' - - steps: - - uses: actions/checkout@v3 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - python-version: [3.8] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Lint with pylint - id: pylint - run: | - make pylint - - name: Lint with flake8 - id: flake8 - run: | - make flake8 - - testing: - needs: setup-job - env: - DATABASE_TEST_USERNAME: postgres - DATABASE_TEST_PASSWORD: postgres - DATABASE_TEST_NAME: postgres - DATABASE_TEST_HOST: localhost - DATABASE_HOST: localhost - DATABASE_PASSWORD: postgres - NATS_SERVERS: "nats://nats:4222" - NATS_CLIENT_NAME: entity.legal_api - NATS_CLUSTER_ID: test-cluster - NATS_FILER_SUBJECT: entity.filing.filer - NATS_QUEUE: entity-auth-worker - TEST_NATS_DOCKER: True - STAN_CLUSTER_NAME: test-cluster - JWT_OIDC_JWKS_CACHE_TIMEOUT: 300 - GO_LIVE_DATE: 2019-08-12 - LEGAL_API_URL: https://mock_legal_api_url - ACCOUNT_SVC_ENTITY_URL: https://mock_account_svc_entity_url - COLIN_API: https://mock_colin_api_url - ACCOUNT_SVC_AUTH_URL: https://mock_account_svc_auth_url - ACCOUNT_SVC_CLIENT_ID: account_svc_client_id - ACCOUNT_SVC_CLIENT_SECRET: account_svc_client_secret - BUSINESS_SCHEMA_ID: test_business_schema_id - BUSINESS_CRED_DEF_ID: test_credential_definition_id - BUSINESS_SCHEMA_NAME: digital_business_card - BUSINESS_SCHEMA_VERSION: "1.0.0" - - - runs-on: ubuntu-20.04 - - services: - postgres: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Test with pytest - id: test - run: 
| - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - file: ./queue_services/entity-auth/coverage.xml - flags: entityfiler - name: codecov-entity-auth - fail_ci_if_error: true - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v3 - - name: build to check strictness - id: build - run: | - make build-nc + business-auth-ci: + uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main + with: + app_name: "business-auth" + working_directory: "./queue_services/entity-auth" + codecov_flag: "entityauth" diff --git a/.github/workflows/business-bn-ci.yml b/.github/workflows/business-bn-ci.yml index 54fea27525..89575b737a 100644 --- a/.github/workflows/business-bn-ci.yml +++ b/.github/workflows/business-bn-ci.yml @@ -6,6 +6,7 @@ on: paths: - "queue_services/entity-bn/**" - "queue_services/common/**" + workflow_dispatch: defaults: run: @@ -13,102 +14,9 @@ defaults: working-directory: ./queue_services/entity-bn jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/lear' - - steps: - - uses: actions/checkout@v3 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - python-version: [3.8] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Lint with pylint - id: pylint - run: | - make pylint - - name: Lint with flake8 - id: flake8 - run: | - make flake8 - - testing: - needs: setup-job - env: - DATABASE_TEST_USERNAME: postgres - DATABASE_TEST_PASSWORD: postgres - DATABASE_TEST_NAME: postgres - DATABASE_TEST_HOST: localhost - NATS_SERVERS: "nats://nats:4222" - NATS_CLIENT_NAME: entity.bn.tester - NATS_CLUSTER_ID: test-cluster - NATS_ENTITY_EVENT_SUBJECT: entity.events - NATS_QUEUE: entity-bn-worker - TEMPLATE_PATH: /home/runner/work/lear/lear/queue_services/entity-bn/src/entity_bn/bn_templates - TEST_NATS_DOCKER: True - STAN_CLUSTER_NAME: test-cluster - BN_HUB_API_URL: https://sometest:4443/rest/REST/BCPartner - BN_HUB_CLIENT_ID: id - BN_HUB_CLIENT_SECRET: secret - - runs-on: ubuntu-20.04 - - services: - postgres: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Test with pytest - id: test - run: | - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - file: ./queue_services/entity-bn/coverage.xml - flags: entitybn - name: codecov-entity-bn - fail_ci_if_error: true - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v3 - - name: build to check strictness - id: build - run: | - make build-nc + business-bn-ci: + uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main + with: + app_name: "business-bn" + working_directory: "./queue_services/entity-bn" + codecov_flag: "entitybn" diff --git a/.github/workflows/business-emailer-ci.yml b/.github/workflows/business-emailer-ci.yml index 
e6f818b06d..b1b80399f6 100644 --- a/.github/workflows/business-emailer-ci.yml +++ b/.github/workflows/business-emailer-ci.yml @@ -6,6 +6,7 @@ on: paths: - "queue_services/entity-emailer/**" - "queue_services/common/**" + workflow_dispatch: defaults: run: @@ -13,118 +14,9 @@ defaults: working-directory: ./queue_services/entity-emailer jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/lear' - - steps: - - uses: actions/checkout@v3 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - python-version: [3.8] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Lint with pylint - id: pylint - run: | - make pylint - - name: Lint with flake8 - id: flake8 - run: | - make flake8 - - testing: - needs: setup-job - env: - DATABASE_TEST_USERNAME: postgres - DATABASE_TEST_PASSWORD: postgres - DATABASE_TEST_NAME: postgres - DATABASE_TEST_HOST: localhost - DATABASE_HOST: localhost - DATABASE_PASSWORD: postgres - TRACKER_DATABASE_TEST_USERNAME: postgres - TRACKER_DATABASE_TEST_PASSWORD: postgres - TRACKER_DATABASE_TEST_NAME: postgres - TRACKER_DATABASE_TEST_HOST: localhost - TRACKER_DATABASE_TEST_PORT: 5433 - NATS_SERVERS: "nats://nats:4222" - NATS_CLIENT_NAME: entity.email.tester - NATS_CLUSTER_ID: test-cluster - NATS_FILER_SUBJECT: entity.email - NATS_QUEUE: entity-email-worker - JWT_OIDC_JWKS_CACHE_TIMEOUT: 300 - TEMPLATE_PATH: /home/runner/work/lear/lear/queue_services/entity-emailer/src/entity_emailer/email_templates - DASHBOARD_URL: https://dev.bcregistry.ca/businesses/ - TEST_NATS_DOCKER: True - STAN_CLUSTER_NAME: test-cluster - - runs-on: ubuntu-20.04 - - services: - postgres: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - postgres-tracker: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5433:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Test with pytest - id: test - run: | - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - file: ./queue_services/entity-emailer/coverage.xml - flags: entityemailer - name: codecov-entity-emailer - fail_ci_if_error: true - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v3 - - name: build to check strictness - id: build - run: | - make build-nc + business-emailer-ci: + uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main + with: + app_name: "business-emailer" + working_directory: "./queue_services/entity-emailer" + codecov_flag: "entityemailer" diff --git a/.github/workflows/business-filer-ci.yml b/.github/workflows/business-filer-ci.yml index 678e44c874..7eb5d66b26 100644 --- a/.github/workflows/business-filer-ci.yml +++ 
b/.github/workflows/business-filer-ci.yml @@ -6,6 +6,7 @@ on: paths: - "queue_services/entity-filer/**" - "queue_services/common/**" + workflow_dispatch: defaults: run: @@ -13,109 +14,9 @@ defaults: working-directory: ./queue_services/entity-filer jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/lear' - - steps: - - uses: actions/checkout@v3 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - python-version: [3.8] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Lint with pylint - id: pylint - run: | - make pylint - - name: Lint with flake8 - id: flake8 - run: | - make flake8 - - testing: - needs: setup-job - env: - DATABASE_TEST_USERNAME: postgres - DATABASE_TEST_PASSWORD: postgres - DATABASE_TEST_NAME: postgres - DATABASE_TEST_HOST: localhost - DATABASE_HOST: localhost - DATABASE_PASSWORD: postgres - NATS_SERVERS: "nats://nats:4222" - NATS_CLIENT_NAME: entity.legal_api - NATS_CLUSTER_ID: test-cluster - NATS_FILER_SUBJECT: entity.filing.filer - NATS_QUEUE: entity-filer-worker - TEST_NATS_DOCKER: True - STAN_CLUSTER_NAME: test-cluster - JWT_OIDC_JWKS_CACHE_TIMEOUT: 300 - GO_LIVE_DATE: 2019-08-12 - LEGAL_API_URL: https://mock_legal_api_url - ACCOUNT_SVC_ENTITY_URL: https://mock_account_svc_entity_url - COLIN_API: https://mock_colin_api_url - ACCOUNT_SVC_AUTH_URL: https://mock_account_svc_auth_url - ACCOUNT_SVC_CLIENT_ID: account_svc_client_id - ACCOUNT_SVC_CLIENT_SECRET: account_svc_client_secret - - - runs-on: ubuntu-20.04 - - services: - postgres: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Test with pytest - id: test - run: | - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - file: ./queue_services/entity-filer/coverage.xml - flags: entityfiler - name: codecov-entity-filer - fail_ci_if_error: true - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v3 - - name: build to check strictness - id: build - run: | - make build-nc + business-filer-ci: + uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main + with: + app_name: "business-filer" + working_directory: "./queue_services/entity-filer" + codecov_flag: "entityfiler" diff --git a/.github/workflows/business-pay-ci.yml b/.github/workflows/business-pay-ci.yml index 9f7888e05c..d79bcb2871 100644 --- a/.github/workflows/business-pay-ci.yml +++ b/.github/workflows/business-pay-ci.yml @@ -6,6 +6,7 @@ on: paths: - "queue_services/entity-pay/**" - "queue_services/common/**" + workflow_dispatch: defaults: run: @@ -13,104 +14,9 @@ defaults: working-directory: ./queue_services/entity-pay jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/lear' - - steps: - - uses: actions/checkout@v3 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: 
- matrix: - python-version: [3.8] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Lint with pylint - id: pylint - run: | - make pylint - - name: Lint with flake8 - id: flake8 - run: | - make flake8 - - testing: - needs: setup-job - env: - DATABASE_TEST_USERNAME: postgres - DATABASE_TEST_PASSWORD: postgres - DATABASE_TEST_NAME: postgres - DATABASE_TEST_HOST: localhost - DATABASE_HOST: localhost - DATABASE_PASSWORD: postgres - NATS_SERVERS: "nats://nats:4222" - NATS_CLIENT_NAME: entity.filing.worker - NATS_CLUSTER_ID: test-cluster - NATS_FILER_SUBJECT: entity.filing.filer - NATS_QUEUE: filing-worker - JWT_OIDC_JWKS_CACHE_TIMEOUT: 300 - TEST_NATS_DOCKER: True - STAN_CLUSTER_NAME: test-cluster - GO_LIVE_DATE: 2019-08-12 - NATS_SUBJECT: entity.filings - NATS_EMAILER_SUBJECT: entity.email - - runs-on: ubuntu-20.04 - - services: - postgres: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Test with pytest - id: test - run: | - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - file: ./queue_services/entity-pay/coverage.xml - flags: entitypay - name: codecov-entity-pay - fail_ci_if_error: true - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v3 - - name: build to check strictness - id: build - run: | - make build-nc + business-pay-ci: + uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main + with: + app_name: "business-pay" + working_directory: "./queue_services/entity-pay" + codecov_flag: "entitypay" diff --git a/queue_services/entity-auth/__init__.py b/queue_services/entity-auth/__init__.py new file mode 100644 index 0000000000..c09781299b --- /dev/null +++ b/queue_services/entity-auth/__init__.py @@ -0,0 +1,15 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Init.""" +# pylint: disable=invalid-name diff --git a/queue_services/entity-auth/poetry.lock b/queue_services/entity-auth/poetry.lock index 6940c5df58..387b1b66fa 100644 --- a/queue_services/entity-auth/poetry.lock +++ b/queue_services/entity-auth/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "alembic" @@ -49,6 +49,17 @@ files = [ {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -84,31 +95,58 @@ setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "bandit" +version = "1.7.7" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = 
"black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -120,7 +158,7 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -304,6 +342,70 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", 
hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "datedelta" version = "1.4" @@ -315,6 +417,20 @@ files = [ {file = "datedelta-1.4.tar.gz", hash = "sha256:3f1ef319ead642a76a3cab731917bf14a0ced0d91943f33ff57ae615837cab97"}, ] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "dpath" version = "2.1.6" @@ -340,6 +456,38 @@ files = [ [package.extras] tests = ["coverage", "coveralls", "dill", "mock", "nose"] +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "flask" version = "2.3.3" @@ -746,6 +894,20 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -870,6 +1032,30 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -939,6 +1125,28 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -972,6 +1180,17 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + [[package]] name = "pg8000" version = "1.30.3" @@ -1079,6 +1298,17 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + [[package]] name = "pycountry" version = "22.3.5" @@ -1092,6 +1322,59 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = 
"sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pyrfc3339" version = "1.1" @@ -1126,6 +1409,24 @@ pluggy = ">=0.12,<2.0" [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "pytest-mock" version = "3.12.0" @@ -1182,6 +1483,66 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "referencing" version = "0.30.2" @@ -1264,6 +1625,24 @@ files = [ {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, ] +[[package]] +name = "rich" +version = "13.7.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rpds-py" version = "0.12.0" @@ -1604,6 +1983,20 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + [[package]] name = "strict-rfc3339" version = "0.7" @@ -1614,6 +2007,17 @@ files = [ {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, ] +[[package]] +name = "tomlkit" +version = "0.12.3" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, +] + [[package]] name = 
"types-python-dateutil" version = "2.8.19.14" @@ -1702,4 +2106,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "74f761b2eee771ab6d896c5caa0362c2d554af5689fa38b7d697aa248b8a9cc0" +content-hash = "4edbfb665b56768823f65bfa6d7a5ba5f503627e1ef7931b325c7beb10417e9f" diff --git a/queue_services/entity-auth/pyproject.toml b/queue_services/entity-auth/pyproject.toml index 2eecfa6a69..0d41a790c7 100644 --- a/queue_services/entity-auth/pyproject.toml +++ b/queue_services/entity-auth/pyproject.toml @@ -28,8 +28,131 @@ Flask-Migrate = "^4.0.4" flask-babel = "^3.1.0" pytest = "^7.4.0" pytest-mock = "^3.11.1" -black = "^23.7.0" +pytest-cov = "^4.0.0" freezegun = "^1.2.2" +black = "^23.12.1" +pylint = "^3.0.3" +bandit = "^1.7.6" +flake8-pyproject = "^1.2.3" +isort = "^5.13.2" + +[tool.bandit] +exclude_dirs = [".venv","tests"] +skips = ["B104"] + +[tool.flake8] +ignore = ["F401","E402", "Q000", "E203", "W503"] +exclude = [ + ".venv", + ".git", + ".history", + "devops", + "*migrations*", +] +per-file-ignores = [ + "__init__.py:F401", + "*.py:B902" +] +max-line-length = 120 +docstring-min-length=10 +count = true + +[tool.black] +target-version = ["py310", "py311", "py312"] +line-length = 120 +include = '\.pyi?$' +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. + migrations + | devops +)/ +''' + +[tool.isort] +atomic = true +profile = "black" +line_length = 120 +skip_gitignore = true +skip_glob = ["migrations", "devops"] + +[tool.pylint.main] +fail-under = 10 +max-line-length = 120 +ignore = [ "migrations", "devops", "tests"] +ignore-patterns = ["^\\.#"] +ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] +ignored-classes= "scoped_session" +ignore-long-lines = "^\\s*(# )??$" +extension-pkg-whitelist = "pydantic" +notes = ["FIXME","XXX","TODO"] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = "C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +function-naming-style = "snake_case" +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "any" +variable-naming-style = "snake_case" +docstring-min-length = -1 +good-names = ["i", "j", "k", "ex", "Run", "_"] +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pytest.ini_options] +minversion = "2.0" +testpaths = [ + "tests", +] +addopts = "--verbose --strict -p no:warnings --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml" +python_files = [ + "test*.py" +] +norecursedirs = [ + ".git", ".tox", "venv*", "requirements*", "build", +] +log_cli = true +log_cli_level = "1" +filterwarnings = [ + "ignore::UserWarning" +] +markers = [ + "slow", + "serial", +] + +[tool.coverage.run] +branch = true +source = [ + "src/entity-auth", +] +omit = [ + "wsgi.py", + "gunicorn_config.py" +] + +[tool.coverage.report] +exclude_lines = [ + 
"pragma: no cover", + "from", + "import", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + 'if __name__ == "__main__":', +] [build-system] requires = ["poetry-core"] diff --git a/queue_services/entity-auth/src/entity_auth/__init__.py b/queue_services/entity-auth/src/entity_auth/__init__.py index b3e63b3dcc..9e455d9d26 100644 --- a/queue_services/entity-auth/src/entity_auth/__init__.py +++ b/queue_services/entity-auth/src/entity_auth/__init__.py @@ -17,11 +17,10 @@ """ from __future__ import annotations -from flask import Flask from business_model import db +from flask import Flask -from .config import Config -from .config import Production +from .config import Config, Production from .resources import register_endpoints from .services import queue from .utils import get_run_version diff --git a/queue_services/entity-auth/src/entity_auth/config.py b/queue_services/entity-auth/src/entity_auth/config.py index 8cd475918c..c11dc29936 100644 --- a/queue_services/entity-auth/src/entity_auth/config.py +++ b/queue_services/entity-auth/src/entity_auth/config.py @@ -23,7 +23,6 @@ from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) @@ -50,11 +49,11 @@ class Config: # pylint: disable=too-few-public-methods # POSTGRESQL if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): - SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" - else: SQLALCHEMY_DATABASE_URI = ( - f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" # API Endpoints NAMEX_API_URL = os.getenv("NAMEX_API_URL", "") @@ -70,12 +69,8 @@ class Config: # pylint: disable=too-few-public-methods # pub/sub GCP_AUTH_KEY = os.getenv("GCP_AUTH_KEY", None) - AUDIENCE = os.getenv( - "AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber" - ) - PUBLISHER_AUDIENCE = os.getenv( - "PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher" - ) + AUDIENCE = os.getenv("AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber") + PUBLISHER_AUDIENCE = os.getenv("PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher") # legislative timezone for future effective dating LEGISLATIVE_TIMEZONE = os.getenv("LEGISLATIVE_TIMEZONE", "America/Vancouver") diff --git a/queue_services/entity-auth/src/entity_auth/resources/__init__.py b/queue_services/entity-auth/src/entity_auth/resources/__init__.py index 06ea6ade3c..d48c045ea6 100644 --- a/queue_services/entity-auth/src/entity_auth/resources/__init__.py +++ b/queue_services/entity-auth/src/entity_auth/resources/__init__.py @@ -41,6 +41,7 @@ def register_endpoints(app: Flask): + """Register endpoints with the flask application""" # Allow base route to match with, and without a trailing slash app.url_map.strict_slashes = False diff --git a/queue_services/entity-auth/src/entity_auth/resources/worker.py b/queue_services/entity-auth/src/entity_auth/resources/worker.py index db6136668a..0dcf82007b 100644 --- a/queue_services/entity-auth/src/entity_auth/resources/worker.py +++ b/queue_services/entity-auth/src/entity_auth/resources/worker.py @@ -39,14 +39,12 @@ 
from http import HTTPStatus from typing import Optional -from flask import Blueprint, current_app -from flask import request +from business_model import EntityRole, Filing, LegalEntity, RegistrationBootstrap +from flask import Blueprint, current_app, request from simple_cloudevent import SimpleCloudEvent from sqlalchemy.exc import OperationalError -from business_model import EntityRole, Filing, LegalEntity from entity_auth.exceptions import AccountServiceException - from entity_auth.services import name_request, queue from entity_auth.services.bootstrap import AccountService from entity_auth.services.logging import structured_log @@ -54,6 +52,8 @@ @dataclass class Message: + """Worker message class""" + id: Optional[str] = None type: Optional[str] = None filing_id: Optional[str] = None @@ -135,11 +135,11 @@ def process_request( filing: Filing = Filing.find_by_id(msg.filing_id) if not filing: - raise Exception + raise Exception # pylint: disable=broad-exception-raised legal_entity: LegalEntity = LegalEntity.find_by_internal_id(filing.legal_entity_id) if not legal_entity: - raise Exception + raise Exception # pylint: disable=broad-exception-raised name_request.consume_nr(legal_entity, filing) @@ -181,7 +181,6 @@ def process_request( def create_affiliation(legal_entity: LegalEntity, filing: Filing): """Create an affiliation for the business and remove the bootstrap.""" - from business_model import RegistrationBootstrap try: bootstrap = RegistrationBootstrap.find_by_identifier(filing.temp_reg) @@ -192,12 +191,7 @@ def create_affiliation(legal_entity: LegalEntity, filing: Filing): if legal_entity.entity_type in ["SP", "GP"]: corp_type_temp_code = "RTMP" pass_code = get_firm_affiliation_passcode(legal_entity.id) - nr_number = ( - filing.filing_json.get("filing") - .get("registration", {}) - .get("nameRequest", {}) - .get("nrNumber") - ) + nr_number = filing.filing_json.get("filing").get("registration", {}).get("nameRequest", {}).get("nrNumber") details = { "bootstrapIdentifier": bootstrap.identifier, @@ -215,9 +209,7 @@ def create_affiliation(legal_entity: LegalEntity, filing: Filing): ) if rv not in (HTTPStatus.OK, HTTPStatus.CREATED): - deaffiliation = AccountService.delete_affiliation( - bootstrap.account, legal_entity.identifier - ) + deaffiliation = AccountService.delete_affiliation(bootstrap.account, legal_entity.identifier) current_app.logger.error( f"Queue Error: Unable to affiliate business:{legal_entity.identifier} for filing:{filing.id}" ) @@ -234,14 +226,10 @@ def create_affiliation(legal_entity: LegalEntity, filing: Filing): or ("deaffiliation" in locals() and deaffiliation != HTTPStatus.OK) or ("bootstrap_update" in locals() and bootstrap_update != HTTPStatus.OK) ): - raise Exception - except ( - Exception - ) as err: # pylint: disable=broad-except; note out any exception, but don't fail the call - current_app.logger.error( - f"Queue Error: Affiliation error for filing:{filing.id}, with err:{err}" - ) - raise AccountServiceException + raise Exception # pylint: disable=broad-exception-raised + except Exception as err: # pylint: disable=broad-except; note out any exception, but don't fail the call + current_app.logger.error(f"Queue Error: Affiliation error for filing:{filing.id}, with err:{err}") + raise AccountServiceException from err def get_firm_affiliation_passcode(legal_entity_id: int): diff --git a/queue_services/entity-auth/src/entity_auth/services/__init__.py b/queue_services/entity-auth/src/entity_auth/services/__init__.py index 19d346fa93..aeb71bc835 100644 --- 
a/queue_services/entity-auth/src/entity_auth/services/__init__.py +++ b/queue_services/entity-auth/src/entity_auth/services/__init__.py @@ -34,5 +34,4 @@ """This module contains all the services used.""" from .gcp_queue import GcpQueue - queue = GcpQueue() diff --git a/queue_services/entity-auth/src/entity_auth/services/bootstrap.py b/queue_services/entity-auth/src/entity_auth/services/bootstrap.py index 196ae935dc..c3f6f081b7 100644 --- a/queue_services/entity-auth/src/entity_auth/services/bootstrap.py +++ b/queue_services/entity-auth/src/entity_auth/services/bootstrap.py @@ -21,11 +21,10 @@ from typing import Dict, Union import requests +from business_model import RegistrationBootstrap from flask import current_app from sqlalchemy.orm.exc import FlushError # noqa: I001 -from business_model import RegistrationBootstrap - class RegistrationBootstrapService: """Provides services to bootstrap the IA registration and account affiliation.""" @@ -41,9 +40,7 @@ def create_bootstrap(account: int) -> Union[Dict, RegistrationBootstrap]: # try to create a bootstrap registration with a unique ID for _ in range(5): - bootstrap.identifier = "T" + "".join( - secrets.choice(allowed_encoded) for _ in range(9) - ) + bootstrap.identifier = "T" + "".join(secrets.choice(allowed_encoded) for _ in range(9)) try: bootstrap.save() return bootstrap @@ -84,9 +81,7 @@ def register_bootstrap( return HTTPStatus.OK with contextlib.suppress(Exception): - AccountService.delete_affiliation( - account=bootstrap.account, business_registration=bootstrap.identifier - ) + AccountService.delete_affiliation(account=bootstrap.account, business_registration=bootstrap.identifier) return {"error": "Unable to create bootstrap registration."} @staticmethod @@ -188,10 +183,7 @@ def create_affiliation( ) # @TODO delete affiliation and entity record next sprint when affiliation service is updated - if ( - affiliate.status_code != HTTPStatus.CREATED - or entity_record.status_code != HTTPStatus.CREATED - ): + if affiliate.status_code != HTTPStatus.CREATED or entity_record.status_code != HTTPStatus.CREATED: return HTTPStatus.BAD_REQUEST return HTTPStatus.OK @@ -271,9 +263,7 @@ def get_account_by_affiliated_identifier(cls, identifier: str): auth_url = current_app.config.get("AUTH_SVC_URL") url = f"{auth_url}/orgs?affiliation={identifier}" - res = requests.get( - url, headers={**cls.CONTENT_TYPE_JSON, "Authorization": cls.BEARER + token} - ) + res = requests.get(url, headers={**cls.CONTENT_TYPE_JSON, "Authorization": cls.BEARER + token}) try: return res.json() except Exception: # noqa B902; pylint: disable=W0703; diff --git a/queue_services/entity-auth/src/entity_auth/services/gcp_queue.py b/queue_services/entity-auth/src/entity_auth/services/gcp_queue.py index e29cf53c54..258c707ea1 100644 --- a/queue_services/entity-auth/src/entity_auth/services/gcp_queue.py +++ b/queue_services/entity-auth/src/entity_auth/services/gcp_queue.py @@ -36,24 +36,29 @@ import base64 import json -from concurrent.futures import CancelledError from concurrent.futures import TimeoutError # pylint: disable=W0622 +from concurrent.futures import CancelledError from contextlib import suppress from typing import Optional from flask import Flask, current_app -from werkzeug.local import LocalProxy from google.auth import jwt from google.cloud import pubsub_v1 -from simple_cloudevent import CloudEventVersionException -from simple_cloudevent import InvalidCloudEventError -from simple_cloudevent import SimpleCloudEvent -from simple_cloudevent import from_queue_message -from 
simple_cloudevent import to_queue_message +from simple_cloudevent import ( + CloudEventVersionException, + InvalidCloudEventError, + SimpleCloudEvent, + from_queue_message, + to_queue_message, +) +from werkzeug.local import LocalProxy class GcpQueue: + """Provides Queue type services""" + def __init__(self, app: Flask = None): + """Initializes the GCP Queue class""" self.audience = None self.credentials_pub = None self.gcp_auth_key = None @@ -62,9 +67,10 @@ def __init__(self, app: Flask = None): self._publisher = None if app: - self.app_init(app) + self.init_app(app) def init_app(self, app: Flask): + """Initializes the application""" self.gcp_auth_key = app.config.get("GCP_AUTH_KEY") if self.gcp_auth_key: try: @@ -77,32 +83,24 @@ def init_app(self, app: Flask): "https://pubsub.googleapis.com/google.pubsub.v1.Publisher", ) - self.service_account_info = json.loads( - base64.b64decode(self.gcp_auth_key).decode("utf-8") - ) - credentials = jwt.Credentials.from_service_account_info( - self.service_account_info, audience=audience - ) - self.credentials_pub = credentials.with_claims( - audience=publisher_audience - ) + self.service_account_info = json.loads(base64.b64decode(self.gcp_auth_key).decode("utf-8")) + credentials = jwt.Credentials.from_service_account_info(self.service_account_info, audience=audience) + self.credentials_pub = credentials.with_claims(audience=publisher_audience) except Exception as error: # noqa: B902 - raise Exception( - "Unable to create a connection", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to create a connection", error) from error # pylint: disable=W0719 @property def publisher(self): + """Returns the publisher""" if not self._publisher and self.credentials_pub: - self._publisher = pubsub_v1.PublisherClient( - credentials=self.credentials_pub - ) + self._publisher = pubsub_v1.PublisherClient(credentials=self.credentials_pub) else: self._publisher = pubsub_v1.PublisherClient() return self.credentials_pub @staticmethod def is_valid_envelope(msg: dict): + """Checks if the envelope is valid""" if ( msg.get("subscription") and (message := msg.get("message")) @@ -114,17 +112,14 @@ def is_valid_envelope(msg: dict): @staticmethod def get_envelope(request: LocalProxy) -> Optional[dict]: + """Returns the envelope""" with suppress(Exception): - if (envelope := request.get_json()) and GcpQueue.is_valid_envelope( - envelope - ): + if (envelope := request.get_json()) and GcpQueue.is_valid_envelope(envelope): return envelope return None @staticmethod - def get_simple_cloud_event( - request: LocalProxy, return_raw: bool = False - ) -> type[SimpleCloudEvent | dict | None]: + def get_simple_cloud_event(request: LocalProxy, return_raw: bool = False) -> type[SimpleCloudEvent | dict | None]: """Return a SimpleCloudEvent if one is in session from the PubSub call. 
Parameters @@ -176,14 +171,14 @@ def publish(self, topic: str, payload: bytes): return future.result() except (CancelledError, TimeoutError) as error: - raise Exception( - "Unable to post to queue", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to post to queue", error) from error # pylint: disable=W0719 @staticmethod def to_queue_message(ce: SimpleCloudEvent): + """Return a byte string of the CloudEvent in JSON format""" return to_queue_message(ce) @staticmethod def from_queue_message(data: dict): + """Convert a queue message back to a simple CloudEvent""" return from_queue_message(data) diff --git a/queue_services/entity-auth/src/entity_auth/services/logging.py b/queue_services/entity-auth/src/entity_auth/services/logging.py index 8fb46cbb79..a72b49e940 100644 --- a/queue_services/entity-auth/src/entity_auth/services/logging.py +++ b/queue_services/entity-auth/src/entity_auth/services/logging.py @@ -40,29 +40,28 @@ def structured_log(request: LocalProxy, severity: str = "NOTICE", message: str = None): + """Prints structured log message""" frm = inspect.stack()[1] mod = inspect.getmodule(frm[0]) # Build structured log messages as an object. global_log_fields = {} - if PROJECT := os.environ.get("GOOGLE_CLOUD_PROJECT"): + if project := os.environ.get("GOOGLE_CLOUD_PROJECT"): # Add log correlation to nest all log messages. trace_header = request.headers.get("X-Cloud-Trace-Context") - if trace_header and PROJECT: + if trace_header and project: trace = trace_header.split("/") - global_log_fields[ - "logging.googleapis.com/trace" - ] = f"projects/{PROJECT}/traces/{trace[0]}" + global_log_fields["logging.googleapis.com/trace"] = f"projects/{project}/traces/{trace[0]}" # Complete a structured log entry. - entry = dict( - severity=severity, - message=message, + entry = { + "severity": severity, + "message": message, # Log viewer accesses 'component' as jsonPayload.component'. 
- component=f"{mod.__name__}.{frm.function}", + "component": f"{mod.__name__}.{frm.function}", **global_log_fields, - ) + } print(json.dumps(entry)) diff --git a/queue_services/entity-auth/src/entity_auth/services/name_request.py b/queue_services/entity-auth/src/entity_auth/services/name_request.py index a9f5baec86..7f10aac5d4 100644 --- a/queue_services/entity-auth/src/entity_auth/services/name_request.py +++ b/queue_services/entity-auth/src/entity_auth/services/name_request.py @@ -16,11 +16,12 @@ from http import HTTPStatus import requests -from flask import current_app, request from business_model import Filing, LegalEntity, RegistrationBootstrap +from flask import current_app, request from entity_auth.exceptions import NamexException from entity_auth.services.logging import structured_log + from .bootstrap import AccountService @@ -52,13 +53,9 @@ def consume_nr(legal_entity: LegalEntity, filing: Filing): raise NamexException # remove the NR from the account - if filing.temp_reg and ( - bootstrap := RegistrationBootstrap.find_by_identifier(filing.temp_reg) - ): + if filing.temp_reg and (bootstrap := RegistrationBootstrap.find_by_identifier(filing.temp_reg)): AccountService.delete_affiliation(bootstrap.account, nr_num) - except ( - Exception - ): # pylint: disable=broad-except; note out any exception, but don't fail the call + except Exception: # pylint: disable=broad-except; note out any exception, but don't fail the call structured_log( request, "ERROR", diff --git a/queue_services/entity-auth/tests/__init__.py b/queue_services/entity-auth/tests/__init__.py index 276f418460..748f0b462e 100644 --- a/queue_services/entity-auth/tests/__init__.py +++ b/queue_services/entity-auth/tests/__init__.py @@ -14,7 +14,6 @@ """The Test Suites to ensure that the service is built and operating correctly.""" import datetime - EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) diff --git a/queue_services/entity-auth/tests/conftest.py b/queue_services/entity-auth/tests/conftest.py index 743b99e6f0..1b562d73e7 100644 --- a/queue_services/entity-auth/tests/conftest.py +++ b/queue_services/entity-auth/tests/conftest.py @@ -16,21 +16,23 @@ import os from contextlib import contextmanager, suppress from typing import Final -from flask_migrate import Migrate, upgrade import pytest from business_model import db as _db -from sqlalchemy import create_engine, event, exc as sqlalchemy_exc, text -from entity_auth import create_app +from flask_migrate import Migrate, upgrade +from sqlalchemy import create_engine, event +from sqlalchemy import exc as sqlalchemy_exc +from sqlalchemy import text +from entity_auth import create_app from entity_auth.config import Testing from . import FROZEN_DATETIME - DB_TEST_NAME: Final = os.getenv("DATABASE_TEST_NAME") +# pylint: disable-next=too-many-arguments def create_test_db( user: str = None, password: str = None, @@ -58,17 +60,13 @@ def create_test_db( : bool If the create database succeeded. 
""" - if database_uri: - DATABASE_URI = database_uri - else: - DATABASE_URI = f"postgresql://{user}:{password}@{host}:{port}/{user}" + if not database_uri: + database_uri = f"postgresql://{user}:{password}@{host}:{port}/{user}" - DATABASE_URI = DATABASE_URI[: DATABASE_URI.rfind("/")] + "/postgres" + database_uri = database_uri[: database_uri.rfind("/")] + "/postgres" try: - with create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with create_engine(database_uri, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(f"CREATE DATABASE {database}")) return True @@ -77,6 +75,7 @@ def create_test_db( return False +# pylint: disable-next=too-many-arguments def drop_test_db( user: str = None, password: str = None, @@ -86,12 +85,10 @@ def drop_test_db( database_uri: str = None, ) -> bool: """Delete the database in our .devcontainer launched postgres DB.""" - if database_uri: - DATABASE_URI = database_uri - else: - DATABASE_URI = f"postgresql://{user}:{password}@{host}:{port}/{user}" + if not database_uri: + database_uri = f"postgresql://{user}:{password}@{host}:{port}/{user}" - DATABASE_URI = DATABASE_URI[: DATABASE_URI.rfind("/")] + "/postgres" + database_uri = database_uri[: database_uri.rfind("/")] + "/postgres" close_all = f""" SELECT pg_terminate_backend(pg_stat_activity.pid) @@ -100,9 +97,7 @@ def drop_test_db( AND pid <> pg_backend_pid(); """ with suppress(sqlalchemy_exc.ProgrammingError, Exception): - with create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with create_engine(database_uri, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(close_all)) conn.execute(text(f"DROP DATABASE {database}")) @@ -115,8 +110,8 @@ def not_raises(exception): """ try: yield - except exception: - raise pytest.fail(f"DID RAISE {exception}") + except exception as exc: + raise pytest.fail(f"DID RAISE {exception}") from exc # fixture to freeze utcnow to a fixed date-time @@ -127,6 +122,7 @@ def freeze_datetime_utcnow(monkeypatch): class _Datetime: @classmethod def utcnow(cls): + """Returns utc now""" return FROZEN_DATETIME monkeypatch.setattr(datetime, "datetime", _Datetime) @@ -140,7 +136,7 @@ def app(): @pytest.fixture -def config(app): +def config(app): # pylint: disable=redefined-outer-name """Return the application config.""" return app.config @@ -189,9 +185,9 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name txn = conn.begin() try: - options = dict(bind=conn, binds={}) + options = {"bind": conn, "binds": {}} # sess = db.create_scoped_session(options=options) - sess = db._make_scoped_session(options=options) + sess = db._make_scoped_session(options=options) # pylint: disable=protected-access except Exception as err: print(err) print("done") @@ -203,9 +199,7 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name @event.listens_for(sess(), "after_transaction_end") def restart_savepoint(sess2, trans): # pylint: disable=unused-variable # Detecting whether this is indeed the nested transaction of the test - if ( - trans.nested and not trans._parent.nested - ): # pylint: disable=protected-access + if trans.nested and not trans._parent.nested: # pylint: disable=protected-access # Handle where test DOESN'T session.commit(), sess2.expire_all() sess.begin_nested() diff --git a/queue_services/entity-auth/tests/unit/__init__.py b/queue_services/entity-auth/tests/unit/__init__.py index 024b0bbdb1..99cbb9ea98 100644 --- a/queue_services/entity-auth/tests/unit/__init__.py +++ 
b/queue_services/entity-auth/tests/unit/__init__.py @@ -15,11 +15,11 @@ import base64 import copy from datetime import datetime -from business_model import LegalEntity +from business_model import Address, AlternateName, EntityRole, Filing, LegalEntity, Office from simple_cloudevent import SimpleCloudEvent, to_queue_message -from entity_auth.services.bootstrap import RegistrationBootstrapService +from entity_auth.services.bootstrap import RegistrationBootstrapService from tests import EPOCH_DATETIME @@ -30,8 +30,6 @@ def create_filing( filing_date=EPOCH_DATETIME, ): """Return a test filing.""" - from business_model import Filing - filing = Filing() filing.filing_date = filing_date @@ -52,10 +50,8 @@ def create_legal_entity( bn9=None, tax_id=None, change_filing_id=None, -): +): # pylint: disable=too-many-arguments """Return a test legal_entity.""" - from business_model import LegalEntity - legal_entity = LegalEntity() legal_entity.identifier = identifier legal_entity.entity_type = entity_type @@ -74,8 +70,6 @@ def create_legal_entity( def create_alternate_name(operating_name, tax_id=None): """Create operating name.""" - from business_model import AlternateName - alternate_name = AlternateName( # identifier="BC1234567", name_type=AlternateName.NameType.OPERATING, @@ -88,8 +82,6 @@ def create_alternate_name(operating_name, tax_id=None): def create_business_address(office_type="businessOffice", change_filing_id=None): """Create an address.""" - from business_model import Address, Office - office = Office(office_type=office_type) office.change_filing_id = change_filing_id @@ -99,10 +91,8 @@ def create_business_address(office_type="businessOffice", change_filing_id=None) return office -def create_office(type): +def create_office(type): # pylint: disable=redefined-builtin """Create an office.""" - from business_model import Address - address = Address( city="Test City", street="Test Street", @@ -116,18 +106,14 @@ def create_office(type): def create_related_entity(related_entity_json): """Create a party.""" - from business_model import Address, LegalEntity - new_party = LegalEntity() new_party.first_name = related_entity_json["officer"].get("firstName", "").upper() new_party.last_name = related_entity_json["officer"].get("lastName", "").upper() - new_party.middle_initial = ( - related_entity_json["officer"].get("middleInitial", "").upper() - ) + new_party.middle_initial = related_entity_json["officer"].get("middleInitial", "").upper() new_party.title = related_entity_json.get("title", "").upper() new_party._legal_name = ( related_entity_json["officer"].get("organizationName", "").upper() - ) + ) # pylint: disable=protected-access new_party.email = related_entity_json["officer"].get("email") new_party.entity_type = related_entity_json["officer"].get("entityType") new_party.identifier = related_entity_json["officer"].get("identifier") @@ -140,9 +126,7 @@ def create_related_entity(related_entity_json): country="CA", postal_code=related_entity_json["mailingAddress"]["postalCode"], region=related_entity_json["mailingAddress"]["addressRegion"], - delivery_instructions=related_entity_json["mailingAddress"] - .get("deliveryInstructions", "") - .upper(), + delivery_instructions=related_entity_json["mailingAddress"].get("deliveryInstructions", "").upper(), ) new_party.entity_mailing_address = mailing_address if related_entity_json.get("deliveryAddress"): @@ -152,19 +136,14 @@ def create_related_entity(related_entity_json): country="CA", postal_code=related_entity_json["deliveryAddress"]["postalCode"], 
region=related_entity_json["deliveryAddress"]["addressRegion"], - delivery_instructions=related_entity_json["deliveryAddress"] - .get("deliveryInstructions", "") - .upper(), + delivery_instructions=related_entity_json["deliveryAddress"].get("deliveryInstructions", "").upper(), ) new_party.entity_delivery_address = delivery_address return new_party -def create_entity_role( - legal_entity, related_entity, roles, appointment_date=EPOCH_DATETIME -): +def create_entity_role(legal_entity, related_entity, roles, appointment_date=EPOCH_DATETIME): """Create party roles.""" - from business_model import EntityRole for role in roles: entity_role = EntityRole( @@ -252,8 +231,9 @@ def create_data(filing_type, entity_type, identifier, bn9=None, tax_id=None): return filing, legal_entity -def get_json_message(filing_id, identifier, message_id, type): - CLOUD_EVENT = SimpleCloudEvent( +def get_json_message(filing_id, identifier, message_id, type): # pylint: disable=redefined-builtin + """Returns the json message data""" + cloud_event = SimpleCloudEvent( source="fake-for-tests", subject="fake-subject", id=message_id, @@ -267,7 +247,7 @@ def get_json_message(filing_id, identifier, message_id, type): json_data = { "subscription": "projects/PUBSUB_PROJECT_ID/subscriptions/SUBSCRIPTION_ID", "message": { - "data": base64.b64encode(to_queue_message(CLOUD_EVENT)).decode("utf-8"), + "data": base64.b64encode(to_queue_message(cloud_event)).decode("utf-8"), }, } return json_data diff --git a/queue_services/entity-auth/tests/unit/test_version.py b/queue_services/entity-auth/tests/unit/test_version.py index 2e696a2f6d..667ecdbb81 100644 --- a/queue_services/entity-auth/tests/unit/test_version.py +++ b/queue_services/entity-auth/tests/unit/test_version.py @@ -17,10 +17,10 @@ Test-Suite to ensure that the version utilities are working as expected. """ from importlib.metadata import version +from uuid import uuid4 from entity_auth import utils - PACKAGE_NAME = "entity_auth" @@ -32,8 +32,6 @@ def test_get_version(): def test_get_version_hash(monkeypatch): """Assert that the version also contains the git commit hash.""" - from uuid import uuid4 - fake_hash = str(uuid4()) monkeypatch.setenv("VCS_REF", fake_hash) rv = utils.get_run_version() diff --git a/queue_services/entity-auth/tests/unit/test_worker.py b/queue_services/entity-auth/tests/unit/test_worker.py index 501dcd7ebf..11bd90e50a 100644 --- a/queue_services/entity-auth/tests/unit/test_worker.py +++ b/queue_services/entity-auth/tests/unit/test_worker.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""The Test Suites to ensure that the entity auth is operating correctly.""" -from http import HTTPStatus import uuid +from http import HTTPStatus import pytest from business_model import LegalEntity @@ -29,9 +29,8 @@ ("incorporationApplication", "BC", "BC1234567"), ], ) -def test_new_legal_entity( - app, session, client, mocker, filing_type, entity_type, identifier -): +# pylint: disable-next=too-many-arguments +def test_new_legal_entity(app, session, client, mocker, filing_type, entity_type, identifier): """Test new legal entity.""" filing, legal_entity = create_data(filing_type, entity_type, identifier) @@ -43,7 +42,7 @@ def create_affiliation_side_effect( corp_type_code, pass_code, details, - ): + ): # pylint: disable=too-many-arguments assert account == 1 assert business_registration == legal_entity.identifier assert business_name == legal_entity.legal_name @@ -61,9 +60,7 @@ def create_affiliation_side_effect( assert details == { "bootstrapIdentifier": filing.temp_reg, "identifier": legal_entity.identifier, - "nrNumber": filing.filing_json["filing"][filing_type]["nameRequest"][ - "nrNumber" - ], + "nrNumber": filing.filing_json["filing"][filing_type]["nameRequest"]["nrNumber"], } return HTTPStatus.OK @@ -90,9 +87,7 @@ def update_entity_side_effect( ) message_id = str(uuid.uuid4()) - json_data = get_json_message( - filing.id, identifier, message_id, f"bc.registry.business.{filing_type}" - ) + json_data = get_json_message(filing.id, identifier, message_id, f"bc.registry.business.{filing_type}") rv = client.post("/", json=json_data) assert rv.status_code == HTTPStatus.OK @@ -114,9 +109,8 @@ def update_entity_side_effect( ("restoration", "SP", "FM1234567"), ], ) -def test_update_entity( - app, session, client, mocker, filing_type, entity_type, identifier -): +# pylint: disable-next=too-many-arguments +def test_update_entity(app, session, client, mocker, filing_type, entity_type, identifier): """Test update entity.""" filing, legal_entity = create_data(filing_type, entity_type, identifier) @@ -145,8 +139,6 @@ def update_entity_side_effect( ) message_id = str(uuid.uuid4()) - json_data = get_json_message( - filing.id, identifier, message_id, f"bc.registry.business.{filing_type}" - ) + json_data = get_json_message(filing.id, identifier, message_id, f"bc.registry.business.{filing_type}") rv = client.post("/", json=json_data) assert rv.status_code == HTTPStatus.OK diff --git a/queue_services/entity-bn/__init__.py b/queue_services/entity-bn/__init__.py new file mode 100644 index 0000000000..c09781299b --- /dev/null +++ b/queue_services/entity-bn/__init__.py @@ -0,0 +1,15 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Init.""" +# pylint: disable=invalid-name diff --git a/queue_services/entity-bn/poetry.lock b/queue_services/entity-bn/poetry.lock index 92d702ad1c..73048f78c9 100644 --- a/queue_services/entity-bn/poetry.lock +++ b/queue_services/entity-bn/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alembic" @@ -106,6 +106,20 @@ files = [ {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "asyncio-nats-client" version = "0.11.5" @@ -168,6 +182,29 @@ setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "bandit" +version = "1.7.7" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -188,29 +225,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = 
"black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -224,7 +265,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -460,6 +501,73 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", 
hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "datedelta" version = "1.4" @@ -471,6 +579,20 @@ files = [ {file = "datedelta-1.4.tar.gz", hash = "sha256:3f1ef319ead642a76a3cab731917bf14a0ced0d91943f33ff57ae615837cab97"}, ] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "dpath" version = "2.1.6" @@ -528,6 +650,39 @@ files = [ [package.extras] tests = ["coverage", "coveralls", "dill", "mock", "nose"] +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" +TOMLi = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "flask" version = "2.3.3" @@ -671,12 +826,12 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = 
">=2.18.0,<3.0.0.dev0" @@ -726,8 +881,8 @@ grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" grpcio = ">=1.51.3,<2.0dev" grpcio-status = ">=1.33.2" proto-plus = [ - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -991,6 +1146,20 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -1269,6 +1438,30 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -1338,6 +1531,28 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "minio" version = "7.2.0" @@ -1388,6 +1603,17 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pbr" +version = "6.0.0" 
+description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + [[package]] name = "pg8000" version = "1.30.3" @@ -1548,6 +1774,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -1582,6 +1810,17 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + [[package]] name = "pycountry" version = "22.3.5" @@ -1699,6 +1938,61 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pypdf2" version = "3.0.1" @@ -1753,6 +2047,24 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "pytest-mock" version = "3.12.0" @@ -1830,6 +2142,66 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "referencing" version = "0.30.2" @@ -1951,6 +2323,24 @@ files = [ {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, ] +[[package]] +name = "rich" +version = "13.7.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rpds-py" version = "0.12.0" @@ -2352,6 +2742,20 @@ test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3 timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + [[package]] name = "strict-rfc3339" version = "0.7" @@ -2373,6 +2777,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tomlkit" +version = "0.12.3" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, +] + 
[[package]] name = "types-python-dateutil" version = "2.8.19.14" @@ -2461,4 +2876,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "de315835126a5720f02fa89d6cfbfb3248bd99f37512b176fe45953587153eda" +content-hash = "a2a14c4e500e47d631925fad2b11c01063ae088b512558bc657a2bfcc639df84" diff --git a/queue_services/entity-bn/pyproject.toml b/queue_services/entity-bn/pyproject.toml index 4dfc7558db..1cc0e7b54b 100644 --- a/queue_services/entity-bn/pyproject.toml +++ b/queue_services/entity-bn/pyproject.toml @@ -26,10 +26,133 @@ attrs = "^23.1.0" dpath = "^2.1.6" [tool.poetry.group.dev.dependencies] -black = "^23.3.0" pytest = "^7.4.0" pytest-mock = "^3.11.1" +pytest-cov = "^4.0.0" requests-mock = "^1.11.0" +black = "^23.12.1" +pylint = "^3.0.3" +bandit = "^1.7.6" +flake8-pyproject = "^1.2.3" +isort = "^5.13.2" + +[tool.bandit] +exclude_dirs = [".venv","tests"] +skips = ["B104"] + +[tool.flake8] +ignore = ["F401","E402", "Q000", "E203", "W503"] +exclude = [ + ".venv", + ".git", + ".history", + "devops", + "*migrations*", +] +per-file-ignores = [ + "__init__.py:F401", + "*.py:B902" +] +max-line-length = 120 +docstring-min-length=10 +count = true + +[tool.black] +target-version = ["py310", "py311", "py312"] +line-length = 120 +include = '\.pyi?$' +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. + migrations + | devops +)/ +''' + +[tool.isort] +atomic = true +profile = "black" +line_length = 120 +skip_gitignore = true +skip_glob = ["migrations", "devops"] + +[tool.pylint.main] +fail-under = 10 +max-line-length = 120 +ignore = [ "migrations", "devops", "tests"] +ignore-patterns = ["^\\.#"] +ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] +ignored-classes= "scoped_session" +ignore-long-lines = "^\\s*(# )??$" +extension-pkg-whitelist = "pydantic" +notes = ["FIXME","XXX","TODO"] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = "C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +function-naming-style = "snake_case" +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "any" +variable-naming-style = "snake_case" +docstring-min-length = -1 +good-names = ["i", "j", "k", "ex", "Run", "_"] +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pytest.ini_options] +minversion = "2.0" +testpaths = [ + "tests", +] +addopts = "--verbose --strict -p no:warnings --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml" +python_files = [ + "test*.py" +] +norecursedirs = [ + ".git", ".tox", "venv*", "requirements*", "build", +] +log_cli = true +log_cli_level = "1" +filterwarnings = [ + "ignore::UserWarning" +] +markers = [ + "slow", + "serial", +] + +[tool.coverage.run] +branch = true +source = [ + "src/entity-bn", +] +omit = [ + "wsgi.py", + "gunicorn_config.py" +] + 
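# Note: "--cov=src" in the pytest addopts above turns coverage collection on for
# every test run; [tool.coverage.run] above scopes what is measured and omits the
# wsgi/gunicorn entry points, while [tool.coverage.report] below strips boilerplate
# lines (imports, __repr__, debug-only branches) from the reported totals.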
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "from",
+    "import",
+    "def __repr__",
+    "if self.debug:",
+    "if settings.DEBUG",
+    "raise AssertionError",
+    "raise NotImplementedError",
+    "if 0:",
+    'if __name__ == "__main__":',
+]
[build-system]
requires = ["poetry-core"]
diff --git a/queue_services/entity-bn/src/entity_bn/__init__.py b/queue_services/entity-bn/src/entity_bn/__init__.py
index c289c2a78e..e2eb6a5f38 100644
--- a/queue_services/entity-bn/src/entity_bn/__init__.py
+++ b/queue_services/entity-bn/src/entity_bn/__init__.py
@@ -23,8 +23,7 @@
from legal_api.utils.run_version import get_run_version
from sentry_sdk.integrations.flask import FlaskIntegration
-from .config import Config
-from .config import Production
+from .config import Config, Production
from .resources import register_endpoints
from .services import queue
diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/__init__.py b/queue_services/entity-bn/src/entity_bn/bn_processors/__init__.py
index 4f7befc580..50ef5dd203 100644
--- a/queue_services/entity-bn/src/entity_bn/bn_processors/__init__.py
+++ b/queue_services/entity-bn/src/entity_bn/bn_processors/__init__.py
@@ -29,6 +29,8 @@
@dataclass
class Message:
+    """Process message class"""
+
    id: Optional[str] = None
    type: Optional[str] = None
    filing_id: Optional[str] = None
@@ -37,9 +39,10 @@ class Message:
    business_number: Optional[str] = None
+# pylint: disable-next=invalid-name
bn_note = (
    "Cannot inform CRA about this change before receiving "
-    + "Business Number (BN15). Modify the "  # pylint: disable=invalid-name
+    + "Business Number (BN15). Modify the "
    + "request xml by providing businessRegistrationNumber, businessProgramIdentifier and "
    + "businessProgramAccountReferenceNumber before resubmitting it."
) @@ -61,9 +64,7 @@ class Message: } -def get_business_type_and_sub_type_code( - legal_type: str, business_owned: bool, owner_legal_type: str -): +def get_business_type_and_sub_type_code(legal_type: str, business_owned: bool, owner_legal_type: str): """Get business_type and business_sub_type.""" business_type = None business_sub_type = None @@ -114,9 +115,7 @@ def get_business_type_and_sub_type_code( def build_input_xml(template_name, data): """Build input XML.""" - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/{template_name}.xml' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/{template_name}.xml').read_text() jnja_template = Template(template, autoescape=True) return jnja_template.render(data) @@ -145,9 +144,7 @@ def request_bn_hub(input_xml): url = current_app.config.get("BN_HUB_API_URL") username = current_app.config.get("BN_HUB_CLIENT_ID") secret = current_app.config.get("BN_HUB_CLIENT_SECRET") - response = requests.get( - url=url, params={"inputXML": input_xml}, auth=(username, secret) - ) + response = requests.get(url=url, params={"inputXML": input_xml}, auth=(username, secret)) return response.status_code, response.text except requests.exceptions.RequestException as err: structured_log(request, "ERROR", str(err)) @@ -167,10 +164,7 @@ def get_owners_legal_type(entity_role: EntityRole): data = response.json() if results := data.get("searchResults", {}).get("results"): for entity in results: - if ( - entity.get("identifier") - == entity_role.related_colin_entity.identifier - ): + if entity.get("identifier") == entity_role.related_colin_entity.identifier: return entity.get("legalType") return None except (requests.exceptions.RequestException, requests.exceptions.HTTPError) as err: diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py b/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py index 5c2fd74893..6a94611a95 100644 --- a/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py +++ b/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py @@ -18,17 +18,9 @@ import dpath from flask import current_app -from legal_api.models import ( - Address, - EntityRole, - Filing, - LegalEntity, - RequestTracker, - db, -) +from legal_api.models import Address, EntityRole, Filing, LegalEntity, RequestTracker, db from legal_api.utils.datetime import datetime from legal_api.utils.legislation_datetime import LegislationDatetime -from sql_versioning import history_cls from sqlalchemy import and_ from entity_bn.bn_processors import ( @@ -41,25 +33,19 @@ from entity_bn.exceptions import BNException, BNRetryExceededException -def process( - legal_entity: LegalEntity, filing: Filing -): # pylint: disable=too-many-branches +def process(legal_entity: LegalEntity, filing: Filing): # pylint: disable=too-many-branches """Process the incoming change of registration request.""" - if filing.meta_data and filing.meta_data.get("changeOfRegistration", {}).get( - "toBusinessName" - ): + if filing.meta_data and filing.meta_data.get("changeOfRegistration", {}).get("toBusinessName"): change_name(legal_entity, filing, RequestTracker.RequestType.CHANGE_NAME) with suppress(KeyError, ValueError): - if dpath.util.get( - filing.filing_json, "filing/changeOfRegistration/parties" - ) and has_party_name_changed(legal_entity, filing): + if dpath.util.get(filing.filing_json, "filing/changeOfRegistration/parties") and has_party_name_changed( + legal_entity, filing + ): 
change_name(legal_entity, filing, RequestTracker.RequestType.CHANGE_PARTY) with suppress(KeyError, ValueError): - if dpath.util.get( - filing.filing_json, "filing/changeOfRegistration/offices/businessOffice" - ): + if dpath.util.get(filing.filing_json, "filing/changeOfRegistration/offices/businessOffice"): if has_previous_address( filing.id, legal_entity.office_delivery_address.one_or_none().office_id, @@ -90,9 +76,7 @@ def change_name( ): """Inform CRA about change of name.""" max_retry = current_app.config.get("BN_HUB_MAX_RETRY") - request_trackers = RequestTracker.find_by( - legal_entity.id, RequestTracker.ServiceName.BN_HUB, name_type, filing.id - ) + request_trackers = RequestTracker.find_by(legal_entity.id, RequestTracker.ServiceName.BN_HUB, name_type, filing.id) if not request_trackers: request_tracker = RequestTracker() request_tracker.legal_entity_id = legal_entity.id @@ -101,9 +85,7 @@ def change_name( request_tracker.service_name = RequestTracker.ServiceName.BN_HUB request_tracker.retry_number = 0 request_tracker.is_processed = False - elif ( - request_tracker := request_trackers.pop() - ) and not request_tracker.is_processed: + elif (request_tracker := request_trackers.pop()) and not request_tracker.is_processed: request_tracker.last_modified = datetime.utcnow() request_tracker.retry_number += 1 @@ -124,7 +106,7 @@ def change_name( elif name_type == RequestTracker.RequestType.CHANGE_PARTY: new_name = legal_entity.legal_name - alternate_name = legal_entity._alternate_names.first() + alternate_name = legal_entity._alternate_names.first() # pylint: disable=protected-access bn15 = alternate_name.bn15 input_xml = build_input_xml( @@ -192,25 +174,21 @@ def change_address( request_tracker.service_name = RequestTracker.ServiceName.BN_HUB request_tracker.retry_number = 0 request_tracker.is_processed = False - elif ( - request_tracker := request_trackers.pop() - ) and not request_tracker.is_processed: + elif (request_tracker := request_trackers.pop()) and not request_tracker.is_processed: request_tracker.last_modified = datetime.utcnow() request_tracker.retry_number += 1 if request_tracker.is_processed: return - effective_date = LegislationDatetime.as_legislation_timezone( - filing.effective_date - ).strftime("%Y-%m-%d") + effective_date = LegislationDatetime.as_legislation_timezone(filing.effective_date).strftime("%Y-%m-%d") address = ( legal_entity.office_delivery_address if address_type == RequestTracker.RequestType.CHANGE_DELIVERY_ADDRESS else legal_entity.office_mailing_address ) - alternate_name = legal_entity._alternate_names.first() + alternate_name = legal_entity._alternate_names.first() # pylint: disable=protected-access bn15 = alternate_name.bn15 input_xml = build_input_xml( @@ -255,9 +233,9 @@ def change_address( # TODO: Fix below functions (and add unit test) to check history data once we have clarity on the versioning changes -def has_previous_address( - transaction_id: int, office_id: int, address_type: str -) -> bool: +# pylint: disable-all; delete this line once functions fixed +# flake8: noqa; delete this line once functions fixed +def has_previous_address(transaction_id: int, office_id: int, address_type: str) -> bool: """Has previous address for the given transaction and office id.""" address_version = version_class(Address) address = ( @@ -280,23 +258,17 @@ def has_party_name_changed(legal_entity: LegalEntity, filing: Filing) -> bool: .filter(party_role_version.transaction_id == filing.transaction_id) .filter(party_role_version.operation_type != 2) 
.filter(party_role_version.business_id == business.id) - .filter( - party_role_version.role - in (PartyRole.RoleTypes.PARTNER.value, PartyRole.RoleTypes.PROPRIETOR.value) - ) + .filter(party_role_version.role in (PartyRole.RoleTypes.PARTNER.value, PartyRole.RoleTypes.PROPRIETOR.value)) .all() ) - if ( - len(party_roles) > 0 - ): # New party added or party deleted by setting cessation_date + if len(party_roles) > 0: # New party added or party deleted by setting cessation_date return True party_names = {} for party_role in business.party_roles.all(): if ( - party_role.role.lower() - in (PartyRole.RoleTypes.PARTNER.value, PartyRole.RoleTypes.PROPRIETOR.value) + party_role.role.lower() in (PartyRole.RoleTypes.PARTNER.value, PartyRole.RoleTypes.PROPRIETOR.value) and party_role.cessation_date is None ): party_names[party_role.party.id] = party_role.party.name @@ -313,11 +285,7 @@ def _get_name(party) -> str: """Return the full name of the party for comparison.""" if party.party_type == Party.PartyTypes.PERSON.value: if party.middle_initial: - return ( - " ".join((party.first_name, party.middle_initial, party.last_name)) - .strip() - .upper() - ) + return " ".join((party.first_name, party.middle_initial, party.last_name)).strip().upper() return " ".join((party.first_name, party.last_name)).strip().upper() return party.organization_name.strip().upper() diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/correction.py b/queue_services/entity-bn/src/entity_bn/bn_processors/correction.py index f9f647677a..ccc885640c 100644 --- a/queue_services/entity-bn/src/entity_bn/bn_processors/correction.py +++ b/queue_services/entity-bn/src/entity_bn/bn_processors/correction.py @@ -25,25 +25,19 @@ ) -def process( - legal_entity: LegalEntity, filing: Filing -): # pylint: disable=too-many-branches +def process(legal_entity: LegalEntity, filing: Filing): # pylint: disable=too-many-branches """Process the incoming correction request.""" - if filing.meta_data and filing.meta_data.get("correction", {}).get( - "toBusinessName" - ): + if filing.meta_data and filing.meta_data.get("correction", {}).get("toBusinessName"): change_name(legal_entity, filing, RequestTracker.RequestType.CHANGE_NAME) with suppress(KeyError, ValueError): - if dpath.util.get( - filing.filing_json, "filing/correction/parties" - ) and has_party_name_changed(legal_entity, filing): + if dpath.util.get(filing.filing_json, "filing/correction/parties") and has_party_name_changed( + legal_entity, filing + ): change_name(legal_entity, filing, RequestTracker.RequestType.CHANGE_PARTY) with suppress(KeyError, ValueError): - if dpath.util.get( - filing.filing_json, "filing/correction/offices/businessOffice" - ): + if dpath.util.get(filing.filing_json, "filing/correction/offices/businessOffice"): if has_previous_address( filing.id, legal_entity.office_delivery_address.one_or_none().office_id, diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/dissolution_or_put_back_on.py b/queue_services/entity-bn/src/entity_bn/bn_processors/dissolution_or_put_back_on.py index 362145a34a..a34cf3f054 100644 --- a/queue_services/entity-bn/src/entity_bn/bn_processors/dissolution_or_put_back_on.py +++ b/queue_services/entity-bn/src/entity_bn/bn_processors/dissolution_or_put_back_on.py @@ -21,18 +21,11 @@ from legal_api.utils.datetime import datetime from legal_api.utils.legislation_datetime import LegislationDatetime -from entity_bn.bn_processors import ( - bn_note, - build_input_xml, - get_splitted_business_number, - request_bn_hub, -) +from 
entity_bn.bn_processors import bn_note, build_input_xml, get_splitted_business_number, request_bn_hub from entity_bn.exceptions import BNException, BNRetryExceededException -def process( - legal_entity: LegalEntity, filing: Filing -): # pylint: disable=too-many-branches +def process(legal_entity: LegalEntity, filing: Filing): # pylint: disable=too-many-branches """Process the incoming dissolution/putBackOn request (SP/GP).""" max_retry = current_app.config.get("BN_HUB_MAX_RETRY") request_trackers = RequestTracker.find_by( @@ -49,23 +42,19 @@ def process( request_tracker.service_name = RequestTracker.ServiceName.BN_HUB request_tracker.retry_number = 0 request_tracker.is_processed = False - elif ( - request_tracker := request_trackers.pop() - ) and not request_tracker.is_processed: + elif (request_tracker := request_trackers.pop()) and not request_tracker.is_processed: request_tracker.last_modified = datetime.utcnow() request_tracker.retry_number += 1 if request_tracker.is_processed: return - effective_date = LegislationDatetime.as_legislation_timezone( - filing.effective_date - ).strftime("%Y-%m-%d") + effective_date = LegislationDatetime.as_legislation_timezone(filing.effective_date).strftime("%Y-%m-%d") program_account_status_code = {"putBackOn": "01", "dissolution": "02"} program_account_reason_code = {"putBackOn": None, "dissolution": "105"} - alternate_name = legal_entity._alternate_names.first() + alternate_name = legal_entity._alternate_names.first() # pylint: disable=protected-access bn15 = alternate_name.bn15 input_xml = build_input_xml( diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/registration.py b/queue_services/entity-bn/src/entity_bn/bn_processors/registration.py index 6320675c72..95d0d5d9ce 100644 --- a/queue_services/entity-bn/src/entity_bn/bn_processors/registration.py +++ b/queue_services/entity-bn/src/entity_bn/bn_processors/registration.py @@ -37,23 +37,23 @@ from entity_bn.services import queue from entity_bn.services.logging import structured_log - FIRMS = ("SP", "GP") CORPS = ("BEN", "BC", "ULC", "CC") def process( - legal_entity: LegalEntity, # pylint: disable=too-many-branches, too-many-arguments, too-many-statements + legal_entity: LegalEntity, is_admin: bool = False, msg: Message = None, skip_build=False, -): +): # pylint: disable=too-many-branches, too-many-arguments, too-many-statements """Process the incoming registration request.""" max_retry = current_app.config.get("BN_HUB_MAX_RETRY") message_id, business_number = None, None if is_admin: if not msg: + # pylint: disable-next=broad-exception-raised raise Exception("code issue: msg is required for admin request") message_id = msg.id @@ -77,9 +77,7 @@ def process( inform_cra_tracker.is_processed = False inform_cra_tracker.is_admin = is_admin inform_cra_tracker.message_id = message_id - elif ( - inform_cra_tracker := request_trackers.pop() - ) and not inform_cra_tracker.is_processed: + elif (inform_cra_tracker := request_trackers.pop()) and not inform_cra_tracker.is_processed: inform_cra_tracker.last_modified = datetime.utcnow() inform_cra_tracker.retry_number += 1 @@ -149,11 +147,8 @@ def process( ) mail_topic = current_app.config.get("ENTITY_MAILER_TOPIC", "mailer") - queue.publish(topic=mail_topic, - payload=queue.to_queue_message(cloud_event)) - except ( - Exception - ) as err: # pylint: disable=broad-except, unused-variable # noqa F841; + queue.publish(topic=mail_topic, payload=queue.to_queue_message(cloud_event)) + except Exception as err: # pylint: disable=broad-except, unused-variable 
# noqa F841; structured_log( request, "ERROR", @@ -162,11 +157,11 @@ def process( def _inform_cra( - legal_entity: LegalEntity, # pylint: disable=too-many-locals + legal_entity: LegalEntity, request_tracker: RequestTracker, business_number: str, skip_build: bool, -): +): # pylint: disable=too-many-locals """Inform CRA about new registration.""" if request_tracker.is_processed: return @@ -179,24 +174,17 @@ def _inform_cra( owner_legal_type = None business_owned = False # True when SP is owned by org - founding_date = LegislationDatetime.as_legislation_timezone( - legal_entity.founding_date - ).strftime("%Y-%m-%d") + founding_date = LegislationDatetime.as_legislation_timezone(legal_entity.founding_date).strftime("%Y-%m-%d") parties = [] if is_firms: parties = legal_entity.entity_roles.all() entity_role = parties[0] party = ( - entity_role.related_colin_entity - if entity_role.is_related_colin_entity - else entity_role.related_entity + entity_role.related_colin_entity if entity_role.is_related_colin_entity else entity_role.related_entity ) if legal_entity.entity_type == "SP" and ( - ( - isinstance(party, LegalEntity) - and party.entity_type == LegalEntity.EntityTypes.ORGANIZATION.value - ) + (isinstance(party, LegalEntity) and party.entity_type == LegalEntity.EntityTypes.ORGANIZATION.value) or isinstance(party, ColinEntity) ): business_owned = True @@ -211,9 +199,7 @@ def _inform_cra( ( business_type_code, business_sub_type_code, - ) = get_business_type_and_sub_type_code( - legal_entity.entity_type, business_owned, owner_legal_type - ) + ) = get_business_type_and_sub_type_code(legal_entity.entity_type, business_owned, owner_legal_type) retry_number = str(request_tracker.retry_number) if request_tracker.message_id: @@ -251,23 +237,18 @@ def _inform_cra( request_tracker.save() -def _get_bn( - legal_entity: LegalEntity, request_tracker: RequestTracker, transaction_id: str -): +def _get_bn(legal_entity: LegalEntity, request_tracker: RequestTracker, transaction_id: str): """Get business number from CRA.""" if request_tracker.is_processed: return request_tracker.request_object = f"{legal_entity.identifier}/{transaction_id}" - status_code, response = _get_program_account( - legal_entity.identifier, transaction_id - ) + status_code, response = _get_program_account(legal_entity.identifier, transaction_id) if status_code == HTTPStatus.OK: - program_account_ref_no = str( - response["program_account_ref_no"]).zfill(4) + program_account_ref_no = str(response["program_account_ref_no"]).zfill(4) bn15 = f"{response['business_no']}{response['business_program_id']}{program_account_ref_no}" - alternate_name = legal_entity._alternate_names.first() + alternate_name = legal_entity._alternate_names.first() # pylint: disable=protected-access alternate_name.bn15 = bn15 legal_entity.save() request_tracker.is_processed = True @@ -283,10 +264,11 @@ def _get_program_account(identifier, transaction_id): # Use Test environment for testing. 
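    # The COLIN programAccount endpoint replies with business_no, business_program_id
    # and program_account_ref_no; _get_bn above zero-pads the reference number and
    # concatenates the three values into the BN15 saved on the alternate name.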
token = AccountService.get_bearer_token() url = f'{current_app.config["COLIN_API"]}/programAccount/{identifier}/{transaction_id}' - response = requests.get(url, - headers={**AccountService.CONTENT_TYPE_JSON, - "Authorization": AccountService.BEARER + token}, - timeout=AccountService.timeout) + response = requests.get( + url, + headers={**AccountService.CONTENT_TYPE_JSON, "Authorization": AccountService.BEARER + token}, + timeout=AccountService.timeout, + ) return response.status_code, response.json() except requests.exceptions.RequestException as err: structured_log(request, "ERROR", str(err)) diff --git a/queue_services/entity-bn/src/entity_bn/config.py b/queue_services/entity-bn/src/entity_bn/config.py index 26c6df9734..1925160aab 100644 --- a/queue_services/entity-bn/src/entity_bn/config.py +++ b/queue_services/entity-bn/src/entity_bn/config.py @@ -23,7 +23,6 @@ from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) @@ -56,15 +55,14 @@ class Config: # pylint: disable=too-few-public-methods # POSTGRESQL if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): - SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" - else: SQLALCHEMY_DATABASE_URI = ( - f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" # legislative timezone for future effective dating - LEGISLATIVE_TIMEZONE = os.getenv( - "LEGISLATIVE_TIMEZONE", "America/Vancouver") + LEGISLATIVE_TIMEZONE = os.getenv("LEGISLATIVE_TIMEZONE", "America/Vancouver") TEMPLATE_PATH = os.getenv("TEMPLATE_PATH", None) # API Endpoints @@ -83,12 +81,8 @@ class Config: # pylint: disable=too-few-public-methods GCP_AUTH_KEY = os.getenv("GCP_AUTH_KEY", None) ENTITY_MAILER_TOPIC = os.getenv("ENTITY_MAILER_TOPIC", "mailer") ENTITY_EVENT_TOPIC = os.getenv("ENTITY_EVENT_TOPIC", "event") - AUDIENCE = os.getenv( - "AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber" - ) - PUBLISHER_AUDIENCE = os.getenv( - "PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher" - ) + AUDIENCE = os.getenv("AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber") + PUBLISHER_AUDIENCE = os.getenv("PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher") class Development(Config): # pylint: disable=too-few-public-methods diff --git a/queue_services/entity-bn/src/entity_bn/resources/__init__.py b/queue_services/entity-bn/src/entity_bn/resources/__init__.py index 76a48d5f5e..baf4ea6479 100644 --- a/queue_services/entity-bn/src/entity_bn/resources/__init__.py +++ b/queue_services/entity-bn/src/entity_bn/resources/__init__.py @@ -41,6 +41,7 @@ def register_endpoints(app: Flask): + """Register endpoints with the flask application""" # Allow base route to match with, and without a trailing slash app.url_map.strict_slashes = False diff --git a/queue_services/entity-bn/src/entity_bn/resources/worker.py b/queue_services/entity-bn/src/entity_bn/resources/worker.py index 29ed7afc58..1b3a05d5d5 100644 --- a/queue_services/entity-bn/src/entity_bn/resources/worker.py +++ b/queue_services/entity-bn/src/entity_bn/resources/worker.py @@ -37,9 +37,7 @@ from contextlib import suppress from http import 
HTTPStatus -from flask import Blueprint -from flask import current_app -from flask import request +from flask import Blueprint, current_app, request from legal_api.models import Filing, LegalEntity from sentry_sdk import capture_message from simple_cloudevent import SimpleCloudEvent @@ -115,12 +113,9 @@ def worker(): ) with suppress(Exception): event_topic = current_app.config.get("ENTITY_EVENT_TOPIC", "filer") - ret = queue.publish( - topic=event_topic, payload=queue.to_queue_message(cloud_event) - ) - structured_log( - request, "INFO", f"publish to entity event: {message.identifier}" - ) + # pylint: disable-next=unused-variable + ret = queue.publish(topic=event_topic, payload=queue.to_queue_message(cloud_event)) # noqa: F841 + structured_log(request, "INFO", f"publish to entity event: {message.identifier}") structured_log(request, "INFO", f"completed ce: {str(ce)}") return {}, HTTPStatus.OK @@ -177,11 +172,11 @@ def process_cra_request( filing: Filing = Filing.find_by_id(msg.filing_id) if not filing: - raise Exception + raise Exception # pylint: disable=broad-exception-raised legal_entity: LegalEntity = LegalEntity.find_by_internal_id(filing.legal_entity_id) if not legal_entity: - raise Exception + raise Exception # pylint: disable=broad-exception-raised if filing.filing_type == "registration": registration.process(legal_entity) diff --git a/queue_services/entity-bn/src/entity_bn/services/__init__.py b/queue_services/entity-bn/src/entity_bn/services/__init__.py index 19d346fa93..aeb71bc835 100644 --- a/queue_services/entity-bn/src/entity_bn/services/__init__.py +++ b/queue_services/entity-bn/src/entity_bn/services/__init__.py @@ -34,5 +34,4 @@ """This module contains all the services used.""" from .gcp_queue import GcpQueue - queue = GcpQueue() diff --git a/queue_services/entity-bn/src/entity_bn/services/gcp_queue.py b/queue_services/entity-bn/src/entity_bn/services/gcp_queue.py index e29cf53c54..258c707ea1 100644 --- a/queue_services/entity-bn/src/entity_bn/services/gcp_queue.py +++ b/queue_services/entity-bn/src/entity_bn/services/gcp_queue.py @@ -36,24 +36,29 @@ import base64 import json -from concurrent.futures import CancelledError from concurrent.futures import TimeoutError # pylint: disable=W0622 +from concurrent.futures import CancelledError from contextlib import suppress from typing import Optional from flask import Flask, current_app -from werkzeug.local import LocalProxy from google.auth import jwt from google.cloud import pubsub_v1 -from simple_cloudevent import CloudEventVersionException -from simple_cloudevent import InvalidCloudEventError -from simple_cloudevent import SimpleCloudEvent -from simple_cloudevent import from_queue_message -from simple_cloudevent import to_queue_message +from simple_cloudevent import ( + CloudEventVersionException, + InvalidCloudEventError, + SimpleCloudEvent, + from_queue_message, + to_queue_message, +) +from werkzeug.local import LocalProxy class GcpQueue: + """Provides Queue type services""" + def __init__(self, app: Flask = None): + """Initializes the GCP Queue class""" self.audience = None self.credentials_pub = None self.gcp_auth_key = None @@ -62,9 +67,10 @@ def __init__(self, app: Flask = None): self._publisher = None if app: - self.app_init(app) + self.init_app(app) def init_app(self, app: Flask): + """Initializes the application""" self.gcp_auth_key = app.config.get("GCP_AUTH_KEY") if self.gcp_auth_key: try: @@ -77,32 +83,24 @@ def init_app(self, app: Flask): "https://pubsub.googleapis.com/google.pubsub.v1.Publisher", ) - 
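                # GCP_AUTH_KEY is expected to hold a base64-encoded service-account JSON
                # blob; it is decoded below into JWT credentials scoped to the subscriber
                # audience, then re-claimed with the publisher audience for publishing.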
self.service_account_info = json.loads( - base64.b64decode(self.gcp_auth_key).decode("utf-8") - ) - credentials = jwt.Credentials.from_service_account_info( - self.service_account_info, audience=audience - ) - self.credentials_pub = credentials.with_claims( - audience=publisher_audience - ) + self.service_account_info = json.loads(base64.b64decode(self.gcp_auth_key).decode("utf-8")) + credentials = jwt.Credentials.from_service_account_info(self.service_account_info, audience=audience) + self.credentials_pub = credentials.with_claims(audience=publisher_audience) except Exception as error: # noqa: B902 - raise Exception( - "Unable to create a connection", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to create a connection", error) from error # pylint: disable=W0719 @property def publisher(self): + """Returns the publisher""" if not self._publisher and self.credentials_pub: - self._publisher = pubsub_v1.PublisherClient( - credentials=self.credentials_pub - ) + self._publisher = pubsub_v1.PublisherClient(credentials=self.credentials_pub) else: self._publisher = pubsub_v1.PublisherClient() return self.credentials_pub @staticmethod def is_valid_envelope(msg: dict): + """Checks if the envelope is valid""" if ( msg.get("subscription") and (message := msg.get("message")) @@ -114,17 +112,14 @@ def is_valid_envelope(msg: dict): @staticmethod def get_envelope(request: LocalProxy) -> Optional[dict]: + """Returns the envelope""" with suppress(Exception): - if (envelope := request.get_json()) and GcpQueue.is_valid_envelope( - envelope - ): + if (envelope := request.get_json()) and GcpQueue.is_valid_envelope(envelope): return envelope return None @staticmethod - def get_simple_cloud_event( - request: LocalProxy, return_raw: bool = False - ) -> type[SimpleCloudEvent | dict | None]: + def get_simple_cloud_event(request: LocalProxy, return_raw: bool = False) -> type[SimpleCloudEvent | dict | None]: """Return a SimpleCloudEvent if one is in session from the PubSub call. Parameters @@ -176,14 +171,14 @@ def publish(self, topic: str, payload: bytes): return future.result() except (CancelledError, TimeoutError) as error: - raise Exception( - "Unable to post to queue", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to post to queue", error) from error # pylint: disable=W0719 @staticmethod def to_queue_message(ce: SimpleCloudEvent): + """Return a byte string of the CloudEvent in JSON format""" return to_queue_message(ce) @staticmethod def from_queue_message(data: dict): + """Convert a queue message back to a simple CloudEvent""" return from_queue_message(data) diff --git a/queue_services/entity-bn/src/entity_bn/services/logging.py b/queue_services/entity-bn/src/entity_bn/services/logging.py index 8fb46cbb79..a72b49e940 100644 --- a/queue_services/entity-bn/src/entity_bn/services/logging.py +++ b/queue_services/entity-bn/src/entity_bn/services/logging.py @@ -40,29 +40,28 @@ def structured_log(request: LocalProxy, severity: str = "NOTICE", message: str = None): + """Prints structured log message""" frm = inspect.stack()[1] mod = inspect.getmodule(frm[0]) # Build structured log messages as an object. global_log_fields = {} - if PROJECT := os.environ.get("GOOGLE_CLOUD_PROJECT"): + if project := os.environ.get("GOOGLE_CLOUD_PROJECT"): # Add log correlation to nest all log messages. 
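        # X-Cloud-Trace-Context arrives as "TRACE_ID/SPAN_ID;o=OPTIONS"; only the
        # TRACE_ID segment is kept and joined with the project id to form the
        # logging.googleapis.com/trace value below.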
trace_header = request.headers.get("X-Cloud-Trace-Context") - if trace_header and PROJECT: + if trace_header and project: trace = trace_header.split("/") - global_log_fields[ - "logging.googleapis.com/trace" - ] = f"projects/{PROJECT}/traces/{trace[0]}" + global_log_fields["logging.googleapis.com/trace"] = f"projects/{project}/traces/{trace[0]}" # Complete a structured log entry. - entry = dict( - severity=severity, - message=message, + entry = { + "severity": severity, + "message": message, # Log viewer accesses 'component' as jsonPayload.component'. - component=f"{mod.__name__}.{frm.function}", + "component": f"{mod.__name__}.{frm.function}", **global_log_fields, - ) + } print(json.dumps(entry)) diff --git a/queue_services/entity-bn/tests/__init__.py b/queue_services/entity-bn/tests/__init__.py index 711bff39e5..2188ff0a42 100644 --- a/queue_services/entity-bn/tests/__init__.py +++ b/queue_services/entity-bn/tests/__init__.py @@ -14,7 +14,6 @@ """The Test Suites to ensure that the service is built and operating correctly.""" import datetime - EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) diff --git a/queue_services/entity-bn/tests/conftest.py b/queue_services/entity-bn/tests/conftest.py index 14aafd8030..7bbe9060e1 100644 --- a/queue_services/entity-bn/tests/conftest.py +++ b/queue_services/entity-bn/tests/conftest.py @@ -16,19 +16,20 @@ import os from contextlib import contextmanager, suppress from typing import Final -from flask_migrate import Migrate, upgrade -import psycopg2 +import psycopg2 import pytest +from flask_migrate import Migrate, upgrade from legal_api import db as _db -from sqlalchemy import create_engine, event, exc as sqlalchemy_exc, text -from entity_bn import create_app +from sqlalchemy import create_engine, event +from sqlalchemy import exc as sqlalchemy_exc +from sqlalchemy import text +from entity_bn import create_app from entity_bn.config import Testing from . 
import FROZEN_DATETIME - DB_TEST_NAME: Final = os.getenv("DATABASE_TEST_NAME") @@ -67,9 +68,7 @@ def create_test_db( DATABASE_URI = DATABASE_URI[: DATABASE_URI.rfind("/")] + "/postgres" try: - with create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(f"CREATE DATABASE {database}")) return True @@ -100,12 +99,8 @@ def drop_test_db( WHERE pg_stat_activity.datname = '{database}' AND pid <> pg_backend_pid(); """ - with suppress( - sqlalchemy_exc.ProgrammingError, psycopg2.OperationalError, Exception - ): - with create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with suppress(sqlalchemy_exc.ProgrammingError, psycopg2.OperationalError, Exception): + with create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(close_all)) conn.execute(text(f"DROP DATABASE {database}")) @@ -206,9 +201,7 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name @event.listens_for(sess(), "after_transaction_end") def restart_savepoint(sess2, trans): # pylint: disable=unused-variable # Detecting whether this is indeed the nested transaction of the test - if ( - trans.nested and not trans._parent.nested - ): # pylint: disable=protected-access + if trans.nested and not trans._parent.nested: # pylint: disable=protected-access # Handle where test DOESN'T session.commit(), sess2.expire_all() sess.begin_nested() diff --git a/queue_services/entity-bn/tests/pytest_marks.py b/queue_services/entity-bn/tests/pytest_marks.py index 7d0320f9e0..4fcd3a58ef 100644 --- a/queue_services/entity-bn/tests/pytest_marks.py +++ b/queue_services/entity-bn/tests/pytest_marks.py @@ -17,8 +17,7 @@ import pytest from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) -skip_in_pod = pytest.mark.skipif((os.getenv('POD_TESTING', False) is False), reason='Skip test when running in pod') +skip_in_pod = pytest.mark.skipif((os.getenv("POD_TESTING", False) is False), reason="Skip test when running in pod") diff --git a/queue_services/entity-bn/tests/unit/__init__.py b/queue_services/entity-bn/tests/unit/__init__.py index fbb2d7a2c8..04cc6700ce 100644 --- a/queue_services/entity-bn/tests/unit/__init__.py +++ b/queue_services/entity-bn/tests/unit/__init__.py @@ -134,9 +134,7 @@ def create_related_entity(party_json): country="CA", postal_code=party_json["mailingAddress"]["postalCode"], region=party_json["mailingAddress"]["addressRegion"], - delivery_instructions=party_json["mailingAddress"] - .get("deliveryInstructions", "") - .upper(), + delivery_instructions=party_json["mailingAddress"].get("deliveryInstructions", "").upper(), ) new_party.entity_mailing_address = mailing_address if party_json.get("deliveryAddress"): @@ -146,17 +144,13 @@ def create_related_entity(party_json): country="CA", postal_code=party_json["deliveryAddress"]["postalCode"], region=party_json["deliveryAddress"]["addressRegion"], - delivery_instructions=party_json["deliveryAddress"] - .get("deliveryInstructions", "") - .upper(), + delivery_instructions=party_json["deliveryAddress"].get("deliveryInstructions", "").upper(), ) new_party.entity_delivery_address = delivery_address return new_party -def create_entity_role( - legal_entity, related_entity, roles, appointment_date=EPOCH_DATETIME -): +def create_entity_role(legal_entity, related_entity, roles, 
appointment_date=EPOCH_DATETIME): """Create party roles.""" from legal_api.models import EntityRole @@ -212,9 +206,7 @@ def create_registration_data(legal_type, identifier="FM1234567", bn9=None, tax_i change_filing_id=filing.id, ) - related_entity = create_related_entity( - person_json if legal_type == "SP" else org_json - ) + related_entity = create_related_entity(person_json if legal_type == "SP" else org_json) role = "proprietor" if legal_type == "SP" else "partner" create_entity_role(legal_entity, related_entity, [role]) legal_entity.save() @@ -225,9 +217,7 @@ def create_registration_data(legal_type, identifier="FM1234567", bn9=None, tax_i return filing.id, legal_entity.id -def get_json_message( - filing_id, identifier, message_id, type, request_type=None, business_number=None -): +def get_json_message(filing_id, identifier, message_id, type, request_type=None, business_number=None): CLOUD_EVENT = SimpleCloudEvent( source="fake-for-tests", subject="fake-subject", diff --git a/queue_services/entity-bn/tests/unit/bn_processors/test_admin.py b/queue_services/entity-bn/tests/unit/bn_processors/test_admin.py index 72cb67e305..61fdcb7eaa 100644 --- a/queue_services/entity-bn/tests/unit/bn_processors/test_admin.py +++ b/queue_services/entity-bn/tests/unit/bn_processors/test_admin.py @@ -12,18 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. """The Test Suites to ensure that the admin is operating correctly.""" -from http import HTTPStatus import secrets import string import uuid import xml.etree.ElementTree as Et +from http import HTTPStatus import pytest from legal_api.models import LegalEntity, RequestTracker from tests.unit import create_registration_data, get_json_message - acknowledgement_response = """
@@ -66,9 +65,7 @@ def side_effect(input_xml): if root.tag == "SBNCreateProgramAccountRequest": return 200, acknowledgement_response - mocker.patch( - "entity_bn.bn_processors.registration.request_bn_hub", side_effect=side_effect - ) + mocker.patch("entity_bn.bn_processors.registration.request_bn_hub", side_effect=side_effect) business_program_id = "BC" program_account_ref_no = 1 @@ -153,9 +150,7 @@ def test_admin_resubmit(app, session, client, mocker, request_type, request_xml) """Test resubmit CRA request.""" identifier = "FM" + "".join(secrets.choice(string.digits) for _ in range(7)) message_id = str(uuid.uuid4()) - filing_id, legal_entity_id = create_registration_data( - "SP", identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data("SP", identifier=identifier, tax_id="993775204BC0001") request_tracker = RequestTracker( request_type=request_type, retry_number=-1, @@ -170,9 +165,7 @@ def test_admin_resubmit(app, session, client, mocker, request_type, request_xml) def side_effect(input_xml): return 200, acknowledgement_response - mocker.patch( - "entity_bn.bn_processors.admin.request_bn_hub", side_effect=side_effect - ) + mocker.patch("entity_bn.bn_processors.admin.request_bn_hub", side_effect=side_effect) legal_entity = LegalEntity.find_by_internal_id(legal_entity_id) diff --git a/queue_services/entity-bn/tests/unit/bn_processors/test_change_of_registration.py b/queue_services/entity-bn/tests/unit/bn_processors/test_change_of_registration.py index 7707390a54..60428a0b21 100644 --- a/queue_services/entity-bn/tests/unit/bn_processors/test_change_of_registration.py +++ b/queue_services/entity-bn/tests/unit/bn_processors/test_change_of_registration.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""The Test Suites to ensure that the change of registration is operating correctly.""" -from http import HTTPStatus import uuid import xml.etree.ElementTree as Et +from http import HTTPStatus import pytest from legal_api.models import RequestTracker @@ -22,7 +22,6 @@ from entity_bn.bn_processors import bn_note from tests.unit import create_filing, create_registration_data, get_json_message - message_type = "bc.registry.business.changeOfRegistration" @@ -36,16 +35,12 @@ def test_change_of_registration(app, session, client, mocker, legal_type): """Test inform cra about change of SP/GP registration.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier, tax_id="993775204BC0001") json_filing = { "filing": { "header": {"name": "changeOfRegistration"}, "changeOfRegistration": { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - }, + "offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}, "parties": [{}], }, } @@ -140,21 +135,15 @@ def side_effect(input_xml): ("GP", "993775204"), ], ) -def test_bn15_not_available_change_of_registration( - app, session, client, mocker, legal_type, bn9 -): +def test_bn15_not_available_change_of_registration(app, session, client, mocker, legal_type, bn9): """Skip cra call when BN15 is not available while doing a change of SP/GP registration.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier, bn9=bn9 - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier, bn9=bn9) json_filing = { "filing": { "header": {"name": "changeOfRegistration"}, "changeOfRegistration": { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - }, + "offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}, "parties": [{}], }, } @@ -237,28 +226,18 @@ def test_bn15_not_available_change_of_registration( (RequestTracker.RequestType.CHANGE_PARTY, {"parties": [{}]}), ( RequestTracker.RequestType.CHANGE_DELIVERY_ADDRESS, - { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - } - }, + {"offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}}, ), ( RequestTracker.RequestType.CHANGE_MAILING_ADDRESS, - { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - } - }, + {"offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}}, ), ], ) def test_retry_change_of_registration(app, session, client, mocker, request_type, data): """Test retry change of SP/GP registration.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - "SP", identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data("SP", identifier=identifier, tax_id="993775204BC0001") json_filing = { "filing": { "header": {"name": "changeOfRegistration"}, diff --git a/queue_services/entity-bn/tests/unit/bn_processors/test_correction.py b/queue_services/entity-bn/tests/unit/bn_processors/test_correction.py index 8f130a05e5..02f7e66e90 100644 --- a/queue_services/entity-bn/tests/unit/bn_processors/test_correction.py +++ b/queue_services/entity-bn/tests/unit/bn_processors/test_correction.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # 
limitations under the License. """The Test Suites to ensure that the correction of registration or change of registration is operating correctly.""" -from http import HTTPStatus import uuid import xml.etree.ElementTree as Et +from http import HTTPStatus import pytest from legal_api.models import RequestTracker @@ -22,7 +22,6 @@ from entity_bn.bn_processors import bn_note from tests.unit import create_filing, create_registration_data, get_json_message - message_type = "bc.registry.business.correction" @@ -36,16 +35,12 @@ def test_correction(app, session, client, mocker, legal_type): """Test inform cra about correction of SP/GP.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier, tax_id="993775204BC0001") json_filing = { "filing": { "header": {"name": "correction"}, "correction": { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - }, + "offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}, "parties": [{}], }, } @@ -70,12 +65,8 @@ def side_effect(input_xml): "entity_bn.bn_processors.change_of_registration.request_bn_hub", side_effect=side_effect, ) - mocker.patch( - "entity_bn.bn_processors.correction.has_previous_address", return_value=True - ) - mocker.patch( - "entity_bn.bn_processors.correction.has_party_name_changed", return_value=True - ) + mocker.patch("entity_bn.bn_processors.correction.has_previous_address", return_value=True) + mocker.patch("entity_bn.bn_processors.correction.has_party_name_changed", return_value=True) message_id = str(uuid.uuid4()) json_data = get_json_message(filing_id, identifier, message_id, message_type) @@ -141,16 +132,12 @@ def side_effect(input_xml): def test_bn15_not_available_correction(app, session, client, mocker, legal_type, bn9): """Skip cra call when BN15 is not available while doing a correction of SP/GP.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier, bn9=bn9 - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier, bn9=bn9) json_filing = { "filing": { "header": {"name": "correction"}, "correction": { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - }, + "offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}, "parties": [{}], }, } @@ -160,12 +147,8 @@ def test_bn15_not_available_correction(app, session, client, mocker, legal_type, filing.save() filing_id = filing.id - mocker.patch( - "entity_bn.bn_processors.correction.has_previous_address", return_value=True - ) - mocker.patch( - "entity_bn.bn_processors.correction.has_party_name_changed", return_value=True - ) + mocker.patch("entity_bn.bn_processors.correction.has_previous_address", return_value=True) + mocker.patch("entity_bn.bn_processors.correction.has_party_name_changed", return_value=True) message_id = str(uuid.uuid4()) json_data = get_json_message(filing_id, identifier, message_id, message_type) @@ -231,28 +214,18 @@ def test_bn15_not_available_correction(app, session, client, mocker, legal_type, (RequestTracker.RequestType.CHANGE_PARTY, {"parties": [{}]}), ( RequestTracker.RequestType.CHANGE_DELIVERY_ADDRESS, - { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - } - }, + {"offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}}, ), 
( RequestTracker.RequestType.CHANGE_MAILING_ADDRESS, - { - "offices": { - "businessOffice": {"mailingAddress": {}, "deliveryAddress": {}} - } - }, + {"offices": {"businessOffice": {"mailingAddress": {}, "deliveryAddress": {}}}}, ), ], ) def test_retry_correction(app, session, client, mocker, request_type, data): """Test retry correction of SP/GP.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - "SP", identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data("SP", identifier=identifier, tax_id="993775204BC0001") json_filing = {"filing": {"header": {"name": "correction"}, "correction": {}}} json_filing["filing"]["correction"] = data filing = create_filing(json_filing=json_filing, legal_entity_id=legal_entity_id) @@ -275,9 +248,7 @@ def side_effect(transaction_id, office_id, address_type): "entity_bn.bn_processors.correction.has_previous_address", side_effect=side_effect, ) - mocker.patch( - "entity_bn.bn_processors.correction.has_party_name_changed", return_value=True - ) + mocker.patch("entity_bn.bn_processors.correction.has_party_name_changed", return_value=True) message_id = str(uuid.uuid4()) for _ in range(10): diff --git a/queue_services/entity-bn/tests/unit/bn_processors/test_dissolution_or_put_back_on.py b/queue_services/entity-bn/tests/unit/bn_processors/test_dissolution_or_put_back_on.py index eb1601632f..36a9a7f556 100644 --- a/queue_services/entity-bn/tests/unit/bn_processors/test_dissolution_or_put_back_on.py +++ b/queue_services/entity-bn/tests/unit/bn_processors/test_dissolution_or_put_back_on.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. """The Test Suites to ensure that the dissolution/putBackOn is operating correctly.""" -from http import HTTPStatus import uuid import xml.etree.ElementTree as Et +from http import HTTPStatus import pytest from legal_api.models import RequestTracker @@ -22,8 +22,7 @@ from entity_bn.bn_processors import bn_note from tests.unit import create_filing, create_registration_data, get_json_message - -message_type = f"bc.registry.business." +message_type = "bc.registry.business." 
@pytest.mark.parametrize( @@ -38,9 +37,7 @@ def test_change_of_status(app, session, client, mocker, legal_type, filing_type): """Test inform cra about change of status of SP/GP.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier, tax_id="993775204BC0001") json_filing = {"filing": {"header": {"name": filing_type}}} if filing_type == "dissolution": json_filing["filing"][filing_type] = {"dissolutionType": "voluntary"} @@ -67,9 +64,7 @@ def side_effect(input_xml): ) message_id = str(uuid.uuid4()) - json_data = get_json_message( - filing_id, identifier, message_id, f"{message_type}{filing_type}" - ) + json_data = get_json_message(filing_id, identifier, message_id, f"{message_type}{filing_type}") rv = client.post("/", json=json_data) assert rv.status_code == HTTPStatus.OK @@ -102,14 +97,10 @@ def side_effect(input_xml): ("GP", "putBackOn", "993775204"), ], ) -def test_bn15_not_available_change_of_status( - app, session, client, mocker, legal_type, filing_type, bn9 -): +def test_bn15_not_available_change_of_status(app, session, client, mocker, legal_type, filing_type, bn9): """Skip cra call when BN15 is not available while doing a change of status SP/GP.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier, bn9=bn9 - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier, bn9=bn9) json_filing = {"filing": {"header": {"name": filing_type}}} if filing_type == "dissolution": @@ -121,9 +112,7 @@ def test_bn15_not_available_change_of_status( filing_id = filing.id message_id = str(uuid.uuid4()) - json_data = get_json_message( - filing_id, identifier, message_id, f"{message_type}{filing_type}" - ) + json_data = get_json_message(filing_id, identifier, message_id, f"{message_type}{filing_type}") rv = client.post("/", json=json_data) assert rv.status_code == HTTPStatus.OK @@ -143,9 +132,7 @@ def test_bn15_not_available_change_of_status( def test_retry_change_of_status(app, session, client, mocker): """Test retry change of status of SP/GP.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - "SP", identifier=identifier, tax_id="993775204BC0001" - ) + filing_id, legal_entity_id = create_registration_data("SP", identifier=identifier, tax_id="993775204BC0001") json_filing = { "filing": { "header": {"name": "dissolution"}, @@ -164,9 +151,7 @@ def test_retry_change_of_status(app, session, client, mocker): message_id = str(uuid.uuid4()) for _ in range(10): - json_data = get_json_message( - filing_id, identifier, message_id, f"{message_type}dissolution" - ) + json_data = get_json_message(filing_id, identifier, message_id, f"{message_type}dissolution") rv = client.post("/", json=json_data) if rv.status_code == HTTPStatus.OK: diff --git a/queue_services/entity-bn/tests/unit/bn_processors/test_registration.py b/queue_services/entity-bn/tests/unit/bn_processors/test_registration.py index 6a5bc901af..a04450ba80 100644 --- a/queue_services/entity-bn/tests/unit/bn_processors/test_registration.py +++ b/queue_services/entity-bn/tests/unit/bn_processors/test_registration.py @@ -12,15 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""The Test Suites to ensure that the registration is operating correctly.""" -from http import HTTPStatus import uuid import xml.etree.ElementTree as Et +from http import HTTPStatus import pytest from legal_api.models import LegalEntity, RequestTracker -from tests.unit import get_json_message, create_registration_data - +from tests.unit import create_registration_data, get_json_message acknowledgement_response = """ @@ -43,18 +42,14 @@ def test_registration(app, session, client, mocker, legal_type): """Test inform cra about new SP/GP registration.""" identifier = "FM1234567" - filing_id, legal_entity_id = create_registration_data( - legal_type, identifier=identifier - ) + filing_id, legal_entity_id = create_registration_data(legal_type, identifier=identifier) def side_effect(input_xml): root = Et.fromstring(input_xml) if root.tag == "SBNCreateProgramAccountRequest": return 200, acknowledgement_response - mocker.patch( - "entity_bn.bn_processors.registration.request_bn_hub", side_effect=side_effect - ) + mocker.patch("entity_bn.bn_processors.registration.request_bn_hub", side_effect=side_effect) # mocker.patch("entity_bn.bn_processors.registration.publish_event") business_number = "993775204" @@ -123,9 +118,7 @@ def side_effect(input_xml): if root.tag == "SBNCreateProgramAccountRequest": return 200, "" if is_inform_cra else acknowledgement_response - mocker.patch( - "entity_bn.bn_processors.registration.request_bn_hub", side_effect=side_effect - ) + mocker.patch("entity_bn.bn_processors.registration.request_bn_hub", side_effect=side_effect) mocker.patch( "entity_bn.bn_processors.registration._get_program_account", return_value=( diff --git a/queue_services/entity-bn/tests/unit/test_version.py b/queue_services/entity-bn/tests/unit/test_version.py index a984a82432..8249808e1a 100644 --- a/queue_services/entity-bn/tests/unit/test_version.py +++ b/queue_services/entity-bn/tests/unit/test_version.py @@ -20,7 +20,6 @@ from entity_bn import utils - PACKAGE_NAME = "entity_bn" diff --git a/queue_services/entity-emailer/__init__.py b/queue_services/entity-emailer/__init__.py new file mode 100644 index 0000000000..c09781299b --- /dev/null +++ b/queue_services/entity-emailer/__init__.py @@ -0,0 +1,15 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Init.""" +# pylint: disable=invalid-name diff --git a/queue_services/entity-emailer/poetry.lock b/queue_services/entity-emailer/poetry.lock index 67de777f8b..5c5d485831 100644 --- a/queue_services/entity-emailer/poetry.lock +++ b/queue_services/entity-emailer/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "alembic" @@ -106,6 +106,17 @@ files = [ {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + [[package]] name = "asyncio-nats-client" version = "0.11.5" @@ -168,6 +179,29 @@ setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "bandit" +version = "1.7.7" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -188,29 +222,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = 
"black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -222,7 +260,7 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -458,6 +496,70 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", 
hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "datedelta" version = "1.4" @@ -469,6 +571,20 @@ files = [ {file = "datedelta-1.4.tar.gz", hash = "sha256:3f1ef319ead642a76a3cab731917bf14a0ced0d91943f33ff57ae615837cab97"}, ] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "dpath" version = "2.1.6" @@ -512,6 +628,38 @@ files = [ [package.extras] tests = ["coverage", "coveralls", "dill", "mock", "nose"] +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "flask" version = "2.3.3" @@ -966,6 +1114,20 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -1244,6 +1406,30 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -1313,6 +1499,28 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "minio" version = "7.2.0" @@ -1363,6 +1571,17 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + [[package]] name = "pg8000" version = "1.30.3" @@ -1523,6 +1742,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = 
"psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -1557,6 +1778,17 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + [[package]] name = "pycountry" version = "22.3.5" @@ -1674,6 +1906,59 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pypdf2" version = "3.0.1" @@ -1726,6 +2011,24 @@ pluggy = ">=0.12,<2.0" [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "pytest-mock" version = "3.12.0" @@ -1803,6 +2106,66 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "referencing" version = "0.30.2" @@ -1924,6 +2287,24 @@ files = [ {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, ] +[[package]] +name = "rich" +version = "13.7.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rpds-py" version = "0.12.0" @@ -2325,6 +2706,20 @@ test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3 timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + [[package]] name = "strict-rfc3339" version = "0.7" @@ -2335,6 +2730,17 @@ files = [ {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, ] +[[package]] +name = "tomlkit" +version = "0.12.3" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, +] + 
[[package]] name = "types-python-dateutil" version = "2.8.19.14" @@ -2423,4 +2829,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "257a9a2fe3a7cdf8dc9a0802b2116f2ec79317abc66b90b8cb7102ee616f1485" +content-hash = "8fa46ba9ff8f7434ec27d1cc356563783901798736ec38668f8eac9924b5e327" diff --git a/queue_services/entity-emailer/pyproject.toml b/queue_services/entity-emailer/pyproject.toml index a753cc62e3..0974425752 100644 --- a/queue_services/entity-emailer/pyproject.toml +++ b/queue_services/entity-emailer/pyproject.toml @@ -23,12 +23,135 @@ jinja2 = "^3.1.2" werkzeug = "^2.3.7" [tool.poetry.group.dev.dependencies] -black = "^23.3.0" pytest = "^7.4.0" pytest-mock = "^3.11.1" +pytest-cov = "^4.0.0" requests-mock = "^1.11.0" psycopg2 = "^2.9.7" sqlalchemy = "^2.0.20" +black = "^23.12.1" +pylint = "^3.0.3" +bandit = "^1.7.6" +flake8-pyproject = "^1.2.3" +isort = "^5.13.2" + +[tool.bandit] +exclude_dirs = [".venv","tests"] +skips = ["B104"] + +[tool.flake8] +ignore = ["F401","E402", "Q000", "E203", "W503"] +exclude = [ + ".venv", + ".git", + ".history", + "devops", + "*migrations*", +] +per-file-ignores = [ + "__init__.py:F401", + "*.py:B902" +] +max-line-length = 120 +docstring-min-length=10 +count = true + +[tool.black] +target-version = ["py310", "py311", "py312"] +line-length = 120 +include = '\.pyi?$' +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. + migrations + | devops +)/ +''' + +[tool.isort] +atomic = true +profile = "black" +line_length = 120 +skip_gitignore = true +skip_glob = ["migrations", "devops"] + +[tool.pylint.main] +fail-under = 10 +max-line-length = 120 +ignore = [ "migrations", "devops", "tests"] +ignore-patterns = ["^\\.#"] +ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] +ignored-classes= "scoped_session" +ignore-long-lines = "^\\s*(# )??$" +extension-pkg-whitelist = "pydantic" +notes = ["FIXME","XXX","TODO"] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = "C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +function-naming-style = "snake_case" +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "any" +variable-naming-style = "snake_case" +docstring-min-length = -1 +good-names = ["i", "j", "k", "ex", "Run", "_"] +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pytest.ini_options] +minversion = "2.0" +testpaths = [ + "tests", +] +addopts = "--verbose --strict -p no:warnings --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml" +python_files = [ + "test*.py" +] +norecursedirs = [ + ".git", ".tox", "venv*", "requirements*", "build", +] +log_cli = true +log_cli_level = "1" +filterwarnings = [ + "ignore::UserWarning" +] +markers = [ + "slow", + "serial", +] + +[tool.coverage.run] +branch = true +source = [ + 
"src/entity-emailer", +] +omit = [ + "wsgi.py", + "gunicorn_config.py" +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "from", + "import", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + 'if __name__ == "__main__":', +] [build-system] requires = ["poetry-core"] diff --git a/queue_services/entity-emailer/src/entity_emailer/__init__.py b/queue_services/entity-emailer/src/entity_emailer/__init__.py index b2fb85931c..d49fce951a 100644 --- a/queue_services/entity-emailer/src/entity_emailer/__init__.py +++ b/queue_services/entity-emailer/src/entity_emailer/__init__.py @@ -43,8 +43,7 @@ from legal_api.utils.run_version import get_run_version from sentry_sdk.integrations.flask import FlaskIntegration -from .config import Config -from .config import Production +from .config import Config, Production from .resources import register_endpoints from .services import queue diff --git a/queue_services/entity-emailer/src/entity_emailer/config.py b/queue_services/entity-emailer/src/entity_emailer/config.py index 859cefe8b1..c18394925b 100644 --- a/queue_services/entity-emailer/src/entity_emailer/config.py +++ b/queue_services/entity-emailer/src/entity_emailer/config.py @@ -43,7 +43,6 @@ from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) @@ -88,11 +87,11 @@ class Config: # pylint: disable=too-few-public-methods # POSTGRESQL if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): - SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" - else: SQLALCHEMY_DATABASE_URI = ( - f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" # variables LEGISLATIVE_TIMEZONE = os.getenv("LEGISLATIVE_TIMEZONE", "America/Vancouver") @@ -129,12 +128,8 @@ class Config: # pylint: disable=too-few-public-methods NAMEX_SERVICE_CLIENT_SECRET = os.getenv("KEYCLOAK_CLIENT_ID") GCP_AUTH_KEY = os.getenv("GCP_AUTH_KEY", None) - AUDIENCE = os.getenv( - "AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber" - ) - PUBLISHER_AUDIENCE = os.getenv( - "PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher" - ) + AUDIENCE = os.getenv("AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber") + PUBLISHER_AUDIENCE = os.getenv("PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher") NAME_REQUEST_URL = os.getenv("NAME_REQUEST_URL", "") DECIDE_BUSINESS_URL = os.getenv("DECIDE_BUSINESS_URL", "") @@ -167,9 +162,7 @@ class Testing(Config): # pylint: disable=too-few-public-methods DEPLOYMENT_ENV = "testing" LEGAL_API_URL = "https://legal-api-url/" PAY_API_URL = "https://pay-api-url/" - SQLALCHEMY_DATABASE_URI = ( - f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" - ) + SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" class Production(Config): # pylint: disable=too-few-public-methods diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/__init__.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/__init__.py index 5b90add7c2..9971ea9347 
100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/__init__.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/__init__.py @@ -22,10 +22,10 @@ from typing import Tuple import requests -from flask import current_app -from flask import request -from legal_api.models import LegalEntity, Filing +from flask import current_app, request +from legal_api.models import Filing, LegalEntity from legal_api.utils.legislation_datetime import LegislationDatetime + from entity_emailer.services.logging import structured_log @@ -42,40 +42,29 @@ def get_filing_info(filing_id: str) -> Tuple[Filing, dict, dict, str, str]: leg_tmz_filing_date = LegislationDatetime.as_legislation_timezone(filing_date) hour = leg_tmz_filing_date.strftime("%I").lstrip("0") am_pm = leg_tmz_filing_date.strftime("%p").lower() - leg_tmz_filing_date = leg_tmz_filing_date.strftime( - f"%B %d, %Y at {hour}:%M {am_pm} Pacific time" - ) + leg_tmz_filing_date = leg_tmz_filing_date.strftime(f"%B %d, %Y at {hour}:%M {am_pm} Pacific time") effective_date = datetime.fromisoformat(filing.effective_date.isoformat()) leg_tmz_effective_date = LegislationDatetime.as_legislation_timezone(effective_date) hour = leg_tmz_effective_date.strftime("%I").lstrip("0") am_pm = leg_tmz_effective_date.strftime("%p").lower() - leg_tmz_effective_date = leg_tmz_effective_date.strftime( - f"%B %d, %Y at {hour}:%M {am_pm} Pacific time" - ) + leg_tmz_effective_date = leg_tmz_effective_date.strftime(f"%B %d, %Y at {hour}:%M {am_pm} Pacific time") return filing, business_json, leg_tmz_filing_date, leg_tmz_effective_date -def get_recipients( - option: str, filing_json: dict, token: str = None, filing_type: str = None -) -> str: +def get_recipients(option: str, filing_json: dict, token: str = None, filing_type: str = None) -> str: """Get the recipients for the email output.""" recipients = "" filing_type = filing_type if filing_type else "incorporationApplication" if filing_json["filing"].get(filing_type): recipients = filing_json["filing"][filing_type]["contactPoint"]["email"] - if ( - option in [Filing.Status.PAID.value, "bn"] - and filing_json["filing"]["header"]["name"] == filing_type - ): + if option in [Filing.Status.PAID.value, "bn"] and filing_json["filing"]["header"]["name"] == filing_type: parties = filing_json["filing"][filing_type].get("parties") comp_party_email = None for party in parties: for role in party["roles"]: - if role["roleType"] == "Completing Party" and ( - comp_party_email := party["officer"].get("email") - ): + if role["roleType"] == "Completing Party" and (comp_party_email := party["officer"].get("email")): recipients = f"{recipients}, {comp_party_email}" break else: @@ -91,9 +80,7 @@ def get_recipient_from_auth(identifier: str, token: str) -> str: """Get the recipients for the email output from auth.""" headers = {"Accept": "application/json", "Authorization": f"Bearer {token}"} - contact_info = requests.get( - f'{current_app.config.get("AUTH_URL")}/entities/{identifier}', headers=headers - ) + contact_info = requests.get(f'{current_app.config.get("AUTH_URL")}/entities/{identifier}', headers=headers) contacts = contact_info.json()["contacts"] if not contacts: @@ -102,7 +89,7 @@ def get_recipient_from_auth(identifier: str, token: str) -> str: "ERROR", f"Queue Error: No email in business {identifier} profile to send output to.", ) - raise Exception + raise Exception # pylint: disable=broad-exception-raised return contacts[0]["email"] @@ -122,9 +109,7 @@ def get_user_from_auth(user_name: str, 
token: str) -> requests.Response: """Get user from auth.""" headers = {"Accept": "application/json", "Authorization": f"Bearer {token}"} - user_info = requests.get( - f'{current_app.config.get("AUTH_URL")}/users/{user_name}', headers=headers - ) + user_info = requests.get(f'{current_app.config.get("AUTH_URL")}/users/{user_name}', headers=headers) return user_info @@ -160,11 +145,7 @@ def substitute_template_parts(template_code: str) -> str: # substitute template parts - marked up by [[filename]] for template_part in template_parts: - template_part_code = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/common/{template_part}.html' - ).read_text() - template_code = template_code.replace( - "[[{}.html]]".format(template_part), template_part_code - ) + template_part_code = Path(f'{current_app.config.get("TEMPLATE_PATH")}/common/{template_part}.html').read_text() + template_code = template_code.replace("[[{}.html]]".format(template_part), template_part_code) return template_code diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/affiliation_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/affiliation_notification.py index 5906f9db43..13adf7b281 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/affiliation_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/affiliation_notification.py @@ -17,21 +17,14 @@ import re from pathlib import Path -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template +from entity_emailer.email_processors import get_filing_info, get_recipients, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_filing_info, - get_recipients, - substitute_template_parts, -) -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, , too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, , too-many-branches """Build the email for Affiliation notification.""" structured_log(request, "DEBUG", f"filing_notification: {email_info}") @@ -41,13 +34,9 @@ def process( ) filing_type = filing.filing_type status = filing.status - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/BC-ALT-DRAFT.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/BC-ALT-DRAFT.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_extension_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_extension_notification.py index 46df7398a4..4d090fc331 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_extension_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_extension_notification.py @@ -20,36 +20,29 @@ from pathlib import Path import requests -from entity_queue_common.service_utils import logger -from flask import current_app +from flask import current_app, request from jinja2 import Template from 
legal_api.models import Filing, LegalEntity from entity_emailer.email_processors import get_filing_info, get_recipient_from_auth, substitute_template_parts +from entity_emailer.services.logging import structured_log -def _get_pdfs( - token: str, - business: dict, - filing: Filing, - filing_date_time: str, - effective_date: str) -> list: +def _get_pdfs(token: str, business: dict, filing: Filing, filing_date_time: str, effective_date: str) -> list: # pylint: disable=too-many-locals, too-many-branches, too-many-statements, too-many-arguments """Get the pdfs for the AGM Extension output.""" pdfs = [] attach_order = 1 - headers = { - "Accept": "application/pdf", - "Authorization": f"Bearer {token}" - } + headers = {"Accept": "application/pdf", "Authorization": f"Bearer {token}"} # add filing pdf filing_pdf = requests.get( f'{current_app.config.get("LEGAL_API_URL")}/businesses/{business["identifier"]}/filings/{filing.id}' - "?type=letterOfAgmExtension", headers=headers + "?type=letterOfAgmExtension", + headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - logger.error("Failed to get pdf for filing: %s", filing.id) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -57,7 +50,7 @@ def _get_pdfs( "fileName": "Letter of AGM Extension Approval.pdf", "fileBytes": filing_pdf_encoded.decode("utf-8"), "fileUrl": "", - "attachOrder": attach_order + "attachOrder": attach_order, } ) attach_order += 1 @@ -72,12 +65,12 @@ def _get_pdfs( "filingDateTime": filing_date_time, "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "" + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, - headers=headers + headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - logger.error("Failed to get receipt pdf for filing: %s", filing.id) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -85,7 +78,7 @@ def _get_pdfs( "fileName": "Receipt.pdf", "fileBytes": receipt_encoded.decode("utf-8"), "fileUrl": "", - "attachOrder": attach_order + "attachOrder": attach_order, } ) attach_order += 1 @@ -95,16 +88,15 @@ def _get_pdfs( def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, too-many-branches """Build the email for AGM Extension notification.""" - logger.debug("agm_extension_notification: %s", email_info) + + structured_log(request, "DEBUG", f"agm_extension_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/AGM-EXT-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/AGM-EXT-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -115,10 +107,10 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-l 
header=(filing.json)["filing"]["header"], filing_date_time=leg_tmz_filing_date, effective_date_time=leg_tmz_effective_date, - entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + - (filing.json)["filing"]["business"].get("identifier", ""), + entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + + (filing.json)["filing"]["business"].get("identifier", ""), email_header=filing_name.upper(), - filing_type=filing_type + filing_type=filing_type, ) # get attachments @@ -142,9 +134,5 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-l return { "recipients": recipients, "requestBy": "BCRegistries@gov.bc.ca", - "content": { - "subject": subject, - "body": f"{html_out}", - "attachments": pdfs - } + "content": {"subject": subject, "body": f"{html_out}", "attachments": pdfs}, } diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_location_change_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_location_change_notification.py index e205870fe6..83e5801a71 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_location_change_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/agm_location_change_notification.py @@ -20,36 +20,29 @@ from pathlib import Path import requests -from entity_queue_common.service_utils import logger -from flask import current_app +from flask import current_app, request from jinja2 import Template from legal_api.models import Filing, LegalEntity from entity_emailer.email_processors import get_filing_info, get_recipient_from_auth, substitute_template_parts +from entity_emailer.services.logging import structured_log -def _get_pdfs( - token: str, - business: dict, - filing: Filing, - filing_date_time: str, - effective_date: str) -> list: +def _get_pdfs(token: str, business: dict, filing: Filing, filing_date_time: str, effective_date: str) -> list: # pylint: disable=too-many-locals, too-many-branches, too-many-statements, too-many-arguments """Get the pdfs for the AGM Location Change output.""" pdfs = [] attach_order = 1 - headers = { - "Accept": "application/pdf", - "Authorization": f"Bearer {token}" - } + headers = {"Accept": "application/pdf", "Authorization": f"Bearer {token}"} # add filing pdf filing_pdf = requests.get( f'{current_app.config.get("LEGAL_API_URL")}/businesses/{business["identifier"]}/filings/{filing.id}' - "?type=letterOfAgmLocationChange", headers=headers + "?type=letterOfAgmLocationChange", + headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - logger.error("Failed to get pdf for filing: %s", filing.id) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -57,7 +50,7 @@ def _get_pdfs( "fileName": "Letter of AGM Location Change Approval.pdf", "fileBytes": filing_pdf_encoded.decode("utf-8"), "fileUrl": "", - "attachOrder": attach_order + "attachOrder": attach_order, } ) attach_order += 1 @@ -72,12 +65,12 @@ def _get_pdfs( "filingDateTime": filing_date_time, "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "" + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, - headers=headers + headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - logger.error("Failed to 
get receipt pdf for filing: %s", filing.id) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -85,7 +78,7 @@ def _get_pdfs( "fileName": "Receipt.pdf", "fileBytes": receipt_encoded.decode("utf-8"), "fileUrl": "", - "attachOrder": attach_order + "attachOrder": attach_order, } ) attach_order += 1 @@ -95,16 +88,14 @@ def _get_pdfs( def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, too-many-branches """Build the email for AGM Location Change notification.""" - logger.debug("agm_location_change_notification: %s", email_info) + structured_log(request, "DEBUG", f"agm_location_change_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/AGM-LOCCHG-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/AGM-LOCCHG-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -115,10 +106,10 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-l header=(filing.json)["filing"]["header"], filing_date_time=leg_tmz_filing_date, effective_date_time=leg_tmz_effective_date, - entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + - (filing.json)["filing"]["business"].get("identifier", ""), + entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + + (filing.json)["filing"]["business"].get("identifier", ""), email_header=filing_name.upper(), - filing_type=filing_type + filing_type=filing_type, ) # get attachments @@ -142,9 +133,5 @@ def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-l return { "recipients": recipients, "requestBy": "BCRegistries@gov.bc.ca", - "content": { - "subject": subject, - "body": f"{html_out}", - "attachments": pdfs - } + "content": {"subject": subject, "body": f"{html_out}", "attachments": pdfs}, } diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/ar_reminder_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/ar_reminder_notification.py index f49f796df2..f26b5f7d1b 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/ar_reminder_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/ar_reminder_notification.py @@ -16,16 +16,12 @@ from pathlib import Path -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, CorpType +from legal_api.models import CorpType, LegalEntity +from entity_emailer.email_processors import get_recipient_from_auth, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_recipient_from_auth, - substitute_template_parts, -) def process(email_msg: dict, token: str, flag_on: bool) -> dict: @@ -34,9 +30,7 @@ def process(email_msg: dict, token: str, flag_on: bool) -> dict: ar_fee = email_msg["arFee"] ar_year = 
email_msg["arYear"] # get template and fill in parts - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/AR-REMINDER.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/AR-REMINDER.html').read_text() filled_template = substitute_template_parts(template) business = LegalEntity.find_by_internal_id(email_msg["businessId"]) corp_type = CorpType.find_by_id(business.entity_type) @@ -49,7 +43,7 @@ def process(email_msg: dict, token: str, flag_on: bool) -> dict: ar_year=ar_year, entity_type=corp_type.full_desc, entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + business.identifier, - disable_specific_service_provider=flag_on + disable_specific_service_provider=flag_on, ) # get recipients diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/bn_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/bn_notification.py index ece2907db8..6f8fcd309c 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/bn_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/bn_notification.py @@ -16,17 +16,12 @@ from pathlib import Path -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, CorpType, Filing, PartyRole +from legal_api.models import CorpType, Filing, LegalEntity, PartyRole +from entity_emailer.email_processors import get_recipient_from_auth, get_recipients, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_recipient_from_auth, - get_recipients, - substitute_template_parts, -) def process(email_msg: dict) -> dict: @@ -45,9 +40,7 @@ def process(email_msg: dict) -> dict: LegalEntity.EntityTypes.PARTNERSHIP.value, ]: filing_type = "registration" - filing = Filing.get_a_businesses_most_recent_filing_of_a_type( - business.id, filing_type - ) + filing = Filing.get_a_businesses_most_recent_filing_of_a_type(business.id, filing_type) corp_type = CorpType.find_by_id(business.entity_type) # render template with vars @@ -58,9 +51,7 @@ def process(email_msg: dict) -> dict: ) # get recipients - recipients = get_recipients( - email_msg["option"], filing.filing_json, filing_type=filing_type - ) + recipients = get_recipients(email_msg["option"], filing.filing_json, filing_type=filing_type) return { "recipients": recipients, "requestBy": "BCRegistries@gov.bc.ca", @@ -77,9 +68,7 @@ def process_bn_move(email_msg: dict, token: str) -> dict: structured_log(request, "DEBUG", f"bn move notification: {email_msg}") # get template and fill in parts - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/BN-MOVE.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/BN-MOVE.html').read_text() filled_template = substitute_template_parts(template) # get filing and business json @@ -96,9 +85,7 @@ def process_bn_move(email_msg: dict, token: str) -> dict: ) recipients = [] - recipients.append( - get_recipient_from_auth(business.identifier, token) - ) # business email + recipients.append(get_recipient_from_auth(business.identifier, token)) # business email role = "" if business.entity_type == LegalEntity.EntityTypes.SOLE_PROP.value: diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/change_of_registration_notification.py 
b/queue_services/entity-emailer/src/entity_emailer/email_processors/change_of_registration_notification.py index 0f2f4c3620..71924ee6ff 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/change_of_registration_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/change_of_registration_notification.py @@ -20,17 +20,12 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, Filing, UserRoles +from legal_api.models import Filing, LegalEntity, UserRoles +from entity_emailer.email_processors import get_filing_info, get_user_email_from_auth, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_filing_info, - get_user_email_from_auth, - substitute_template_parts, -) def _get_pdfs( @@ -55,9 +50,7 @@ def _get_pdfs( headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -77,20 +70,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id - if business_data and business_data.tax_id - else "", + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -129,26 +116,16 @@ def _get_pdfs( return pdfs -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, , too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, , too-many-branches """Build the email for Change of Registration notification.""" - structured_log( - request, "DEBUG", f"change_of_registration_notification: {email_info}" - ) + structured_log(request, "DEBUG", f"change_of_registration_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/CHGREG-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/CHGREG-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = 
Template(filled_template, autoescape=True) @@ -166,9 +143,7 @@ def process( ) # get attachments - pdfs = _get_pdfs( - status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_pdfs(status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) # get recipients recipients = [] @@ -180,21 +155,13 @@ def process( break if filing.filing_json["filing"]["changeOfRegistration"].get("contactPoint"): - recipients.append( - filing.filing_json["filing"]["changeOfRegistration"]["contactPoint"][ - "email" - ] - ) + recipients.append(filing.filing_json["filing"]["changeOfRegistration"]["contactPoint"]["email"]) if filing.submitter_roles and UserRoles.staff in filing.submitter_roles: # when staff do filing documentOptionalEmail may contain completing party email - recipients.append( - filing.filing_json["filing"]["header"].get("documentOptionalEmail") - ) + recipients.append(filing.filing_json["filing"]["header"].get("documentOptionalEmail")) else: - recipients.append( - get_user_email_from_auth(filing.filing_submitter.username, token) - ) + recipients.append(get_user_email_from_auth(filing.filing_submitter.username, token)) recipients = list(set(recipients)) recipients = ", ".join(filter(None, recipients)).strip() diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/consent_continuation_out_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/consent_continuation_out_notification.py index 17ef821375..f1be31d10a 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/consent_continuation_out_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/consent_continuation_out_notification.py @@ -20,17 +20,12 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, Filing, UserRoles +from legal_api.models import Filing, LegalEntity, UserRoles +from entity_emailer.email_processors import get_filing_info, get_recipient_from_auth, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_filing_info, - get_recipient_from_auth, - substitute_template_parts, -) def _get_pdfs( @@ -74,20 +69,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id - if business_data and business_data.tax_id - else "", + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -103,26 +92,16 @@ def _get_pdfs( return pdfs -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, too-many-branches """Build the email for Consent Continuation Out notification.""" - structured_log( - 
request, "DEBUG", f"consent_continuation_out_notification: {email_info}" - ) + structured_log(request, "DEBUG", f"consent_continuation_out_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/CCO-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/CCO-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -140,9 +119,7 @@ def process( ) # get attachments - pdfs = _get_pdfs( - token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_pdfs(token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) # get recipients identifier = filing.filing_json["filing"]["business"]["identifier"] @@ -151,9 +128,7 @@ def process( if filing.submitter_roles and UserRoles.staff in filing.submitter_roles: # when staff file a CCO documentOptionalEmail may contain completing party email - recipients.append( - filing.filing_json["filing"]["header"].get("documentOptionalEmail") - ) + recipients.append(filing.filing_json["filing"]["header"].get("documentOptionalEmail")) recipients = list(set(recipients)) recipients = ", ".join(filter(None, recipients)).strip() diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/continuation_out_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/continuation_out_notification.py index e8e265ae94..385eca2225 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/continuation_out_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/continuation_out_notification.py @@ -20,17 +20,12 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, Filing, UserRoles +from legal_api.models import Filing, LegalEntity, UserRoles +from entity_emailer.email_processors import get_filing_info, get_recipient_from_auth, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_filing_info, - get_recipient_from_auth, - substitute_template_parts, -) def _get_pdfs( @@ -54,20 +49,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id - if business_data and business_data.tax_id - else "", + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", 
f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -83,24 +72,16 @@ def _get_pdfs( return pdfs -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, too-many-branches """Build the email for Continuation Out notification.""" structured_log(request, "DEBUG", f"continuation_out_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/CO-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/CO-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -118,9 +99,7 @@ def process( ) # get attachments - pdfs = _get_pdfs( - token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_pdfs(token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) # get recipients identifier = filing.filing_json["filing"]["business"]["identifier"] @@ -129,9 +108,7 @@ def process( if filing.submitter_roles and UserRoles.staff in filing.submitter_roles: # when staff file a CO documentOptionalEmail may contain completing party email - recipients.append( - filing.filing_json["filing"]["header"].get("documentOptionalEmail") - ) + recipients.append(filing.filing_json["filing"]["header"].get("documentOptionalEmail")) recipients = list(set(recipients)) recipients = ", ".join(filter(None, recipients)).strip() diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/correction_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/correction_notification.py index 0f75369f06..51e6a3650b 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/correction_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/correction_notification.py @@ -21,15 +21,14 @@ from typing import Optional import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template from legal_api.core.filing_helper import is_special_resolution_correction_by_filing_json from legal_api.models import Filing -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import get_filing_info, substitute_template_parts from entity_emailer.email_processors.special_resolution_helper import get_completed_pdfs +from entity_emailer.services.logging import structured_log def _get_pdfs( @@ -47,8 +46,9 @@ def _get_pdfs( attach_order = 1 headers = {"Accept": "application/pdf", "Authorization": f"Bearer {token}"} entity_type = business.get("legalType", None) - 
is_cp_special_resolution = entity_type == 'CP' and is_special_resolution_correction_by_filing_json( - filing.filing_json['filing']) + is_cp_special_resolution = entity_type == "CP" and is_special_resolution_correction_by_filing_json( + filing.filing_json["filing"] + ) if status == Filing.Status.PAID.value: # add filing pdf @@ -58,9 +58,7 @@ def _get_pdfs( headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -79,18 +77,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), "businessNumber": business.get("taxId", ""), }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -135,9 +129,7 @@ def _get_pdfs( headers=headers, ) if noa.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get noa pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get noa pdf for filing: {filing.id}") else: noa_encoded = base64.b64encode(noa.content) pdfs.append( @@ -152,29 +144,32 @@ def _get_pdfs( elif is_cp_special_resolution: rules_changed = bool(filing.filing_json["filing"]["correction"].get("rulesFileKey")) memorandum_changed = bool(filing.filing_json["filing"]["correction"].get("memorandumFileKey")) - pdfs = get_completed_pdfs(token, business, filing, name_changed, - rules_changed=rules_changed, memorandum_changed=memorandum_changed) + pdfs = get_completed_pdfs( + token, + business, + filing, + name_changed, + rules_changed=rules_changed, + memorandum_changed=memorandum_changed, + ) return pdfs +# pylint: disable-next=too-many-arguments def _get_template( prefix: str, status: str, filing_type: str, - filing: Filing, # pylint: disable=too-many-arguments + filing: Filing, business: dict, leg_tmz_filing_date: str, leg_tmz_effective_date: str, name_changed: bool, ) -> str: """Return rendered template.""" - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/{prefix}-CRCTN-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/{prefix}-CRCTN-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -185,8 +180,7 @@ def _get_template( header=(filing.json)["filing"]["header"], filing_date_time=leg_tmz_filing_date, effective_date_time=leg_tmz_effective_date, - entity_dashboard_url=current_app.config.get("DASHBOARD_URL") - + business.get("identifier", ""), + entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + business.get("identifier", ""), email_header=filing_name.upper(), filing_type=filing_type, 
name_changed=name_changed, @@ -205,9 +199,7 @@ def _get_recipients(filing: Filing) -> list: break if filing.filing_json["filing"]["correction"].get("contactPoint"): - recipients.append( - filing.filing_json["filing"]["correction"]["contactPoint"]["email"] - ) + recipients.append(filing.filing_json["filing"]["correction"]["contactPoint"]["email"]) recipients = list(set(recipients)) recipients = list(filter(None, recipients)) @@ -217,9 +209,9 @@ def _get_recipients(filing: Filing) -> list: def get_subject(status: str, prefix: str, business: dict) -> str: """Return subject.""" subjects = { - Filing.Status.PAID.value: "Confirmation of correction" - if prefix == "CP-SR" - else "Confirmation of Filing from the Business Registry", + Filing.Status.PAID.value: ( + "Confirmation of correction" if prefix == "CP-SR" else "Confirmation of Filing from the Business Registry" + ), Filing.Status.COMPLETED.value: "Correction Documents from the Business Registry", } @@ -233,17 +225,13 @@ def get_subject(status: str, prefix: str, business: dict) -> str: return subject -def process( - email_info: dict, token: str -) -> Optional[dict]: # pylint: disable=too-many-locals, , too-many-branches +def process(email_info: dict, token: str) -> Optional[dict]: # pylint: disable=too-many-locals, , too-many-branches """Build the email for Correction notification.""" structured_log(request, "DEBUG", f"correction_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) prefix = "BC" entity_type = business.get("legalType", None) @@ -252,18 +240,14 @@ def process( if entity_type in ["SP", "GP"]: prefix = "FIRM" elif entity_type in ["BC", "BEN", "CC", "ULC"]: - original_filing_type = filing.filing_json["filing"]["correction"][ - "correctedFilingType" - ] + original_filing_type = filing.filing_json["filing"]["correction"]["correctedFilingType"] if original_filing_type in [ "annualReport", "changeOfAddress", "changeOfDirectors", ]: return None - elif entity_type == "CP" and is_special_resolution_correction_by_filing_json( - filing.filing_json["filing"] - ): + elif entity_type == "CP" and is_special_resolution_correction_by_filing_json(filing.filing_json["filing"]): prefix = "CP-SR" name_changed = "requestType" in filing.filing_json["filing"]["correction"].get("nameRequest", {}) else: diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/dissolution_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/dissolution_notification.py index 61ed03dafa..402e28e1b9 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/dissolution_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/dissolution_notification.py @@ -20,18 +20,17 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, Filing, UserRoles +from legal_api.models import Filing, LegalEntity, UserRoles -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import ( get_filing_info, get_recipient_from_auth, get_user_email_from_auth, substitute_template_parts, ) 
+from entity_emailer.services.logging import structured_log def _get_pdfs( @@ -57,9 +56,7 @@ def _get_pdfs( headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -79,20 +76,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id - if business_data and business_data.tax_id - else "", + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -111,9 +102,7 @@ def _get_pdfs( headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -203,25 +192,17 @@ def _get_pdfs( return pdfs -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, , too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, , too-many-branches """Build the email for Dissolution notification.""" structured_log(request, "DEBUG", f"dissolution_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) entity_type = business.get("legalType", None) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/DIS-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/DIS-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -239,9 +220,7 @@ def process( ) # get attachments - pdfs = _get_pdfs( - status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_pdfs(status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) # get recipients identifier = filing.filing_json["filing"]["business"]["identifier"] @@ -250,13 +229,9 @@ def process( if filing.submitter_roles and UserRoles.staff in filing.submitter_roles: # when staff file a dissolution documentOptionalEmail may contain completing party email - recipients.append( - 
filing.filing_json["filing"]["header"].get("documentOptionalEmail") - ) + recipients.append(filing.filing_json["filing"]["header"].get("documentOptionalEmail")) else: - recipients.append( - get_user_email_from_auth(filing.filing_submitter.username, token) - ) + recipients.append(get_user_email_from_auth(filing.filing_submitter.username, token)) if entity_type in [ "SP", diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/filing_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/filing_notification.py index 2f9a2c9811..0ccedb488a 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/filing_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/filing_notification.py @@ -20,19 +20,17 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, Filing, UserRoles +from legal_api.models import Filing, LegalEntity, UserRoles -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import ( get_filing_info, get_recipients, get_user_email_from_auth, substitute_template_parts, ) - +from entity_emailer.services.logging import structured_log FILING_TYPE_CONVERTER = { "incorporationApplication": "IA", @@ -65,19 +63,11 @@ def _get_pdfs( headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) - file_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) - if ( - ar_date := filing.filing_json["filing"] - .get("annualReport", {}) - .get("annualReportDate") - ): + file_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) + if ar_date := filing.filing_json["filing"].get("annualReport", {}).get("annualReportDate"): file_name = f"{ar_date[:4]} {file_name}" pdfs.append( @@ -91,9 +81,9 @@ def _get_pdfs( attach_order += 1 # add receipt pdf if filing.filing_type == "incorporationApplication": - corp_name = filing.filing_json["filing"]["incorporationApplication"][ - "nameRequest" - ].get("legalName", "Numbered Company") + corp_name = filing.filing_json["filing"]["incorporationApplication"]["nameRequest"].get( + "legalName", "Numbered Company" + ) else: corp_name = business.get("legalName") @@ -102,18 +92,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), "businessNumber": business.get("taxId", ""), }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -134,9 +120,7 @@ def _get_pdfs( headers=headers, ) if noa.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get noa pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", 
f"Failed to get noa pdf for filing: {filing.id}") else: noa_encoded = base64.b64encode(noa.content) pdfs.append( @@ -224,9 +208,7 @@ def _get_pdfs( ) attach_order += 1 - if filing.filing_type == "alteration" and get_additional_info(filing).get( - "nameChange", False - ): + if filing.filing_type == "alteration" and get_additional_info(filing).get("nameChange", False): # add certificate of name change certificate = requests.get( f'{current_app.config.get("LEGAL_API_URL")}/businesses/{business["identifier"]}/filings/{filing.id}' @@ -263,26 +245,20 @@ def process( # pylint: disable=too-many-locals, too-many-statements, too-many-b # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) if filing_type == "incorporationApplication" and status == Filing.Status.PAID.value: business = (filing.json)["filing"]["incorporationApplication"]["nameRequest"] business["identifier"] = filing.temp_reg entity_type = business.get("legalType") - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) template = Path( f'{current_app.config.get("TEMPLATE_PATH")}/BC-{FILING_TYPE_CONVERTER[filing_type]}-{status}.html' ).read_text() filled_template = substitute_template_parts(template) # render template with vars - numbered_description = LegalEntity.BUSINESSES.get(entity_type, {}).get( - "numberedDescription" - ) + numbered_description = LegalEntity.BUSINESSES.get(entity_type, {}).get("numberedDescription") jnja_template = Template(filled_template, autoescape=True) filing_data = (filing.json)["filing"][f"{filing_type}"] html_out = jnja_template.render( @@ -292,8 +268,7 @@ def process( # pylint: disable=too-many-locals, too-many-statements, too-many-b header=(filing.json)["filing"]["header"], filing_date_time=leg_tmz_filing_date, effective_date_time=leg_tmz_effective_date, - entity_dashboard_url=current_app.config.get("DASHBOARD_URL") - + business.get("identifier", ""), + entity_dashboard_url=current_app.config.get("DASHBOARD_URL") + business.get("identifier", ""), email_header=filing_name.upper(), filing_type=filing_type, numbered_description=numbered_description, @@ -301,24 +276,18 @@ def process( # pylint: disable=too-many-locals, too-many-statements, too-many-b ) # get attachments - pdfs = _get_pdfs( - status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_pdfs(status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) # get recipients recipients = get_recipients(status, filing.filing_json, token) if filing_type == "alteration": if filing.submitter_roles and UserRoles.staff in filing.submitter_roles: # when staff do filing documentOptionalEmail may contain completing party email - optional_email = filing.filing_json["filing"]["header"].get( - "documentOptionalEmail" - ) + optional_email = filing.filing_json["filing"]["header"].get("documentOptionalEmail") if optional_email: recipients = f"{recipients}, {optional_email}" else: - user_email = get_user_email_from_auth( - filing.filing_submitter.username, token - ) + user_email = get_user_email_from_auth(filing.filing_submitter.username, token) recipients = 
f"{recipients}, {user_email}" if not recipients: @@ -329,9 +298,7 @@ def process( # pylint: disable=too-many-locals, too-many-statements, too-many-b if filing_type == "incorporationApplication": subject = "Confirmation of Filing from the Business Registry" elif filing_type in ["changeOfAddress", "changeOfDirectors"]: - address_director = [x for x in ["Address", "Director"] if x in filing_type][ - 0 - ] + address_director = [x for x in ["Address", "Director"] if x in filing_type][0] subject = f"Confirmation of {address_director} Change" elif filing_type == "annualReport": subject = "Confirmation of Annual Report" @@ -348,9 +315,9 @@ def process( # pylint: disable=too-many-locals, too-many-statements, too-many-b subject = "Notification from the BC Business Registry" if filing.filing_type == "incorporationApplication": - business_name = filing.filing_json["filing"]["incorporationApplication"][ - "nameRequest" - ].get("businessName", None) + business_name = filing.filing_json["filing"]["incorporationApplication"]["nameRequest"].get( + "businessName", None + ) else: business_name = business.get("businessName", None) @@ -367,9 +334,7 @@ def get_additional_info(filing: Filing) -> dict: """Populate any additional info required for a filing type.""" additional_info = {} if filing.filing_type == "alteration": - meta_data_alteration = ( - filing.meta_data.get("alteration", {}) if filing.meta_data else {} - ) + meta_data_alteration = filing.meta_data.get("alteration", {}) if filing.meta_data else {} additional_info["nameChange"] = "toLegalName" in meta_data_alteration return additional_info diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/mras_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/mras_notification.py index adb80a557a..64cec0a32d 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/mras_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/mras_notification.py @@ -16,16 +16,11 @@ from pathlib import Path -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template +from entity_emailer.email_processors import get_filing_info, get_recipients, substitute_template_parts from entity_emailer.services.logging import structured_log -from entity_emailer.email_processors import ( - get_filing_info, - get_recipients, - substitute_template_parts, -) def process(email_msg: dict) -> dict: @@ -33,14 +28,10 @@ def process(email_msg: dict) -> dict: structured_log(request, "DEBUG", f"mras_notification: {email_msg}") filing_type = email_msg["type"] # get template and fill in parts - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/BC-MRAS.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/BC-MRAS.html').read_text() filled_template = substitute_template_parts(template) # get template info from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_msg["filingId"] - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_msg["filingId"]) # render template with vars jnja_template = Template(filled_template, autoescape=True) diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/name_request.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/name_request.py index 760773899f..fb01607338 100644 --- 
a/queue_services/entity-emailer/src/entity_emailer/email_processors/name_request.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/name_request.py @@ -19,25 +19,20 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template from legal_api.services import NameXService -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import substitute_template_parts +from entity_emailer.services.logging import structured_log def process(email_info: dict) -> dict: """Build the email for Name Request notification.""" structured_log(request, "DEBUG", f"NR_notification: {email_info}") nr_number = email_info["identifier"] - payment_token = ( - email_info.get("data", {}).get("request", {}).get("paymentToken", "") - ) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/NR-PAID.html' - ).read_text() + payment_token = email_info.get("data", {}).get("request", {}).get("paymentToken", "") + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/NR-PAID.html').read_text() filled_template = substitute_template_parts(template) # render template with vars mail_template = Template(filled_template, autoescape=True) @@ -46,9 +41,7 @@ def process(email_info: dict) -> dict: # get nr data nr_response = NameXService.query_nr_number(nr_number) if nr_response.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get nr info for name request: {nr_number}" - ) + structured_log(request, "ERROR", f"Failed to get nr info for name request: {nr_number}") return {} nr_data = nr_response.json() @@ -85,9 +78,7 @@ def _get_pdfs(nr_id: str, payment_token: str) -> list: headers={"Accept": "application/json", "Authorization": f"Bearer {token}"}, ) if nr_payments.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get payment info for name request id: {nr_id}" - ) + structured_log(request, "ERROR", f"Failed to get payment info for name request id: {nr_id}") return [] # find specific payment corresponding to payment token @@ -110,9 +101,7 @@ def _get_pdfs(nr_id: str, payment_token: str) -> list: headers={"Accept": "application/pdf", "Authorization": f"Bearer {token}"}, ) if receipt.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for name request id: {nr_id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for name request id: {nr_id}") return [] # add receipt to pdfs diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/nr_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/nr_notification.py index 789c8b46aa..2678f0a77b 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/nr_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/nr_notification.py @@ -19,14 +19,13 @@ from http import HTTPStatus from pathlib import Path -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template from legal_api.services import NameXService from legal_api.utils.legislation_datetime import LegislationDatetime -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import substitute_template_parts +from entity_emailer.services.logging import structured_log class Option(Enum): @@ -75,9 +74,7 @@ def process(email_info: dict, option) -> 
dict: # pylint: disable-msg=too-many-l nr_response = NameXService.query_nr_number(nr_number) if nr_response.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get nr info for name request: {nr_number}" - ) + structured_log(request, "ERROR", f"Failed to get nr info for name request: {nr_number}") return {} nr_data = nr_response.json() @@ -90,9 +87,7 @@ def process(email_info: dict, option) -> dict: # pylint: disable-msg=too-many-l refund_value = "" if option == Option.REFUND.value: - refund_value = ( - email_info.get("data", {}).get("request", {}).get("refundValue", None) - ) + refund_value = email_info.get("data", {}).get("request", {}).get("refundValue", None) business_name = "" for n_item in nr_data["names"]: @@ -129,7 +124,7 @@ def process(email_info: dict, option) -> dict: # pylint: disable-msg=too-many-l decide_business_url=decide_business_url, corp_online_url=corp_online_url, form_page_url=form_page_url, - societies_url=societies_url + societies_url=societies_url, ) # get recipients diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/registration_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/registration_notification.py index e570a90501..abd1e927d9 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/registration_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/registration_notification.py @@ -20,13 +20,12 @@ from pathlib import Path import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Template -from legal_api.models import LegalEntity, CorpType, Filing +from legal_api.models import CorpType, Filing, LegalEntity -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import get_filing_info, substitute_template_parts +from entity_emailer.services.logging import structured_log def _get_pdfs( @@ -52,20 +51,14 @@ def _get_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id - if business_data and business_data.tax_id - else "", + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -83,9 +76,7 @@ def _get_pdfs( headers=headers, ) if filing_pdf.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: filing_pdf_encoded = base64.b64encode(filing_pdf.content) pdfs.append( @@ -101,30 +92,20 @@ def _get_pdfs( return pdfs -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, , too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, , too-many-branches """Build the email for Registration notification.""" structured_log(request, "DEBUG", f"registration_notification: 
{email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) - business = ( - business if business else filing.json["filing"]["registration"]["business"] - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) + business = business if business else filing.json["filing"]["registration"]["business"] identifier = business.get("identifier") name_request = filing.json["filing"]["registration"]["nameRequest"] corp_type = CorpType.find_by_id(name_request.get("legalType")) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/REG-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/REG-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) @@ -143,9 +124,7 @@ def process( ) # get attachments - pdfs = _get_pdfs( - status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_pdfs(status, token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) # get recipients recipients = [] @@ -157,9 +136,7 @@ def process( break if status == Filing.Status.COMPLETED.value: - recipients.append( - filing.filing_json["filing"]["registration"]["contactPoint"]["email"] - ) + recipients.append(filing.filing_json["filing"]["registration"]["contactPoint"]["email"]) for party in filing.filing_json["filing"]["registration"]["parties"]: for role in party["roles"]: diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/restoration_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/restoration_notification.py index 600a7698e8..04fd47c1c7 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/restoration_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/restoration_notification.py @@ -19,13 +19,12 @@ from http import HTTPStatus import requests -from flask import current_app -from flask import request +from flask import current_app, request from jinja2 import Environment, FileSystemLoader -from legal_api.models import LegalEntity, CorpType, Filing +from legal_api.models import CorpType, Filing, LegalEntity -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import get_filing_info +from entity_emailer.services.logging import structured_log def _get_completed_pdfs(token: str, business: dict, filing: Filing) -> list: @@ -42,9 +41,7 @@ def _get_completed_pdfs(token: str, business: dict, filing: Filing) -> list: headers=headers, ) if noa.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", f"Failed to get noa pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get noa pdf for filing: {filing.id}") else: noa_encoded = base64.b64encode(noa.content) pdfs.append( @@ -63,9 +60,7 @@ def _get_completed_pdfs(token: str, business: dict, filing: Filing) -> list: headers=headers, ) if certificate.status_code != HTTPStatus.OK: - structured_log( - request, "ERROR", 
f"Failed to get certificate pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get certificate pdf for filing: {filing.id}") else: certificate_encoded = base64.b64encode(certificate.content) pdfs.append( @@ -119,20 +114,14 @@ def _get_paid_pdfs( json={ "corpName": corp_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": business_data.tax_id - if business_data and business_data.tax_id - else "", + "businessNumber": business_data.tax_id if business_data and business_data.tax_id else "", }, headers=headers, ) if receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(receipt.content) pdfs.append( @@ -148,20 +137,14 @@ def _get_paid_pdfs( return pdfs -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, , too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, , too-many-branches """Build the email for Restoration notification.""" structured_log(request, "DEBUG", f"registration_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) business = business if business else filing.json["filing"]["business"] identifier = business.get("identifier") filing_business = filing.json["filing"]["business"] @@ -175,9 +158,7 @@ def process( ) # look for a template in this format RES-fullRestoration-PAID.html # if the template doesn't exists use RES-PAID.html - template = jinja_env.select_template( - [f"RES-{restoration_type}-{status}.jinja2", f"RES-{status}.jinja2"] - ) + template = jinja_env.select_template([f"RES-{restoration_type}-{status}.jinja2", f"RES-{status}.jinja2"]) filing_data = filing.json["filing"][f"{filing_type}"] html_out = template.render( @@ -195,9 +176,7 @@ def process( # get attachments if status == Filing.Status.PAID.value: - pdfs = _get_paid_pdfs( - token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = _get_paid_pdfs(token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) if status == Filing.Status.COMPLETED.value: pdfs = _get_completed_pdfs(token, business, filing) diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_helper.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_helper.py index 86a527ee77..c22efaf9e4 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_helper.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_helper.py @@ -16,9 +16,8 @@ from http import HTTPStatus import requests -from 
flask import current_app -from flask import request -from legal_api.models import LegalEntity, Filing +from flask import current_app, request +from legal_api.models import Filing, LegalEntity from entity_emailer.services.logging import structured_log @@ -55,7 +54,7 @@ def get_completed_pdfs( structured_log( request, "ERROR", - f"Failed to get specialResolution pdf for filing: {filing.id}, status code: {special_resolution.status_code}", + f"Failed to get specialResolution pdf for filing: {filing.id}, status code: {special_resolution.status_code}", # noqa: E501 ) # Change of Name @@ -67,8 +66,7 @@ def get_completed_pdfs( ) if name_change.status_code == HTTPStatus.OK: - certified_name_change_encoded = base64.b64encode( - name_change.content) + certified_name_change_encoded = base64.b64encode(name_change.content) pdfs.append( { "fileName": "Certificate of Name Change.pdf", @@ -82,7 +80,7 @@ def get_completed_pdfs( structured_log( request, "ERROR", - f"Failed to get certificateOfNameChange pdf for filing: {filing.id}, status code: {name_change.status_code}", + f"Failed to get certificateOfNameChange pdf for filing: {filing.id}, status code: {name_change.status_code}", # noqa: E501 ) # Certified Rules @@ -109,7 +107,7 @@ def get_completed_pdfs( memorandum = requests.get( f'{current_app.config.get("LEGAL_API_URL")}/businesses/{business["identifier"]}/filings/{filing.id}' "?type=certifiedMemorandum", - headers=headers + headers=headers, ) if memorandum.status_code == HTTPStatus.OK: certified_memorandum_encoded = base64.b64encode(memorandum.content) @@ -118,7 +116,7 @@ def get_completed_pdfs( "fileName": "Certified Memorandum.pdf", "fileBytes": certified_memorandum_encoded.decode("utf-8"), "fileUrl": "", - "attachOrder": attach_order + "attachOrder": attach_order, } ) attach_order += 1 @@ -149,8 +147,7 @@ def get_paid_pdfs( ) if sr_filing_pdf.status_code != HTTPStatus.OK: - structured_log(request, "ERROR", - f"Failed to get pdf for filing: {filing.id}") + structured_log(request, "ERROR", f"Failed to get pdf for filing: {filing.id}") else: sr_filing_pdf_encoded = base64.b64encode(sr_filing_pdf.content) pdfs.append( @@ -171,21 +168,15 @@ def get_paid_pdfs( json={ "corpName": business_name, "filingDateTime": filing_date_time, - "effectiveDateTime": effective_date - if effective_date != filing_date_time - else "", + "effectiveDateTime": effective_date if effective_date != filing_date_time else "", "filingIdentifier": str(filing.id), - "businessNumber": origin_business.tax_id - if origin_business and origin_business.tax_id - else "", + "businessNumber": origin_business.tax_id if origin_business and origin_business.tax_id else "", }, headers=headers, ) if sr_receipt.status_code != HTTPStatus.CREATED: - structured_log( - request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}" - ) + structured_log(request, "ERROR", f"Failed to get receipt pdf for filing: {filing.id}") else: receipt_encoded = base64.b64encode(sr_receipt.content) pdfs.append( diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_notification.py index 5e29aeca7a..04681d162c 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_notification.py +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/special_resolution_notification.py @@ -17,52 +17,36 @@ import re from pathlib import Path -from flask import current_app -from 
flask import request +from flask import current_app, request from jinja2 import Template from legal_api.models import Filing, UserRoles -from entity_emailer.services.logging import structured_log - - from entity_emailer.email_processors import ( get_filing_info, get_recipient_from_auth, get_user_email_from_auth, substitute_template_parts, ) -from entity_emailer.email_processors.special_resolution_helper import ( - get_completed_pdfs, - get_paid_pdfs, -) +from entity_emailer.email_processors.special_resolution_helper import get_completed_pdfs, get_paid_pdfs +from entity_emailer.services.logging import structured_log -def process( - email_info: dict, token: str -) -> dict: # pylint: disable=too-many-locals, too-many-branches +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals, too-many-branches """Build the email for Special Resolution notification.""" structured_log(request, "DEBUG", f"special_resolution_notification: {email_info}") # get template and fill in parts filing_type, status = email_info["type"], email_info["option"] # get template vars from filing - filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info( - email_info["filingId"] - ) - filing_name = filing.filing_type[0].upper() + " ".join( - re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:]) - ) + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info["filingId"]) + filing_name = filing.filing_type[0].upper() + " ".join(re.findall("[a-zA-Z][^A-Z]*", filing.filing_type[1:])) - template = Path( - f'{current_app.config.get("TEMPLATE_PATH")}/SR-CP-{status}.html' - ).read_text() + template = Path(f'{current_app.config.get("TEMPLATE_PATH")}/SR-CP-{status}.html').read_text() filled_template = substitute_template_parts(template) # render template with vars jnja_template = Template(filled_template, autoescape=True) filing_data = (filing.json)["filing"][f"{filing_type}"] name_changed = filing.filing_json["filing"].get("changeOfName") - rules_changed = bool( - filing.filing_json["filing"].get("alteration", {}).get("rulesFileKey") - ) + rules_changed = bool(filing.filing_json["filing"].get("alteration", {}).get("rulesFileKey")) html_out = jnja_template.render( business=business, filing=filing_data, @@ -79,9 +63,7 @@ def process( # get attachments if status == Filing.Status.PAID.value: - pdfs = get_paid_pdfs( - token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date - ) + pdfs = get_paid_pdfs(token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) if status == Filing.Status.COMPLETED.value: pdfs = get_completed_pdfs(token, business, filing, name_changed, rules_changed) @@ -92,13 +74,9 @@ def process( if filing.submitter_roles and UserRoles.staff in filing.submitter_roles: # when staff file a dissolution documentOptionalEmail may contain completing party email - recipients.append( - filing.filing_json["filing"]["header"].get("documentOptionalEmail") - ) + recipients.append(filing.filing_json["filing"]["header"].get("documentOptionalEmail")) else: - recipients.append( - get_user_email_from_auth(filing.filing_submitter.username, token) - ) + recipients.append(get_user_email_from_auth(filing.filing_submitter.username, token)) recipients = list(set(recipients)) recipients = ", ".join(filter(None, recipients)).strip() diff --git a/queue_services/entity-emailer/src/entity_emailer/resources/__init__.py b/queue_services/entity-emailer/src/entity_emailer/resources/__init__.py index 256109db93..06de21e9fa 100644 --- 
a/queue_services/entity-emailer/src/entity_emailer/resources/__init__.py +++ b/queue_services/entity-emailer/src/entity_emailer/resources/__init__.py @@ -41,6 +41,7 @@ def register_endpoints(app: Flask): + """Register endpoints with the flask application""" # Allow base route to match with, and without a trailing slash app.url_map.strict_slashes = False diff --git a/queue_services/entity-emailer/src/entity_emailer/resources/worker.py b/queue_services/entity-emailer/src/entity_emailer/resources/worker.py index 9c20a42303..88a5e13810 100644 --- a/queue_services/entity-emailer/src/entity_emailer/resources/worker.py +++ b/queue_services/entity-emailer/src/entity_emailer/resources/worker.py @@ -38,18 +38,11 @@ from http import HTTPStatus import requests -from flask import Blueprint -from flask import Flask -from flask import current_app -from flask import request -from legal_api import db +from flask import Blueprint, current_app, request from legal_api.models import Filing from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags -from simple_cloudevent import SimpleCloudEvent -from entity_emailer.services import queue -from entity_emailer.services.logging import structured_log from entity_emailer.email_processors import ( affiliation_notification, agm_extension_notification, @@ -69,6 +62,8 @@ restoration_notification, special_resolution_notification, ) +from entity_emailer.services import queue +from entity_emailer.services.logging import structured_log bp = Blueprint("worker", __name__) @@ -120,20 +115,14 @@ def worker(): # 4. Send email # ## - if ( - not email - or "recipients" not in email - or "content" not in email - or "body" not in email["content"] - ): + if not email or "recipients" not in email or "content" not in email or "body" not in email["content"]: # email object(s) is empty, take off queue structured_log(request, "INFO", "Send email: email object(s) is empty") return {}, HTTPStatus.OK if not email["recipients"] or not email["content"] or not email["content"]["body"]: # email object(s) is missing, take off queue - structured_log(request, "INFO", - "Send email: email object(s) is missing") + structured_log(request, "INFO", "Send email: email object(s) is missing") return {}, HTTPStatus.OK resp = send_email(email, token) @@ -152,20 +141,16 @@ def worker(): return {}, HTTPStatus.OK -def process_email( - email_msg: dict, token: str -): # pylint: disable=too-many-branches, too-many-statements +def process_email(email_msg: dict, token: str): # pylint: disable=too-many-branches, too-many-statements """Process the email contained in the submission.""" flags = Flags() if current_app.config.get("LD_SDK_KEY", None): flags.init_app(current_app) - structured_log(request, "DEBUG", - f"Attempting to process email: {email_msg}") + structured_log(request, "DEBUG", f"Attempting to process email: {email_msg}") etype = email_msg.get("type", None) if etype and etype == "bc.registry.names.request": - option = email_msg.get("data", {}).get( - "request", {}).get("option", None) + option = email_msg.get("data", {}).get("request", {}).get("option", None) if option and option in [ nr_notification.Option.BEFORE_EXPIRY.value, nr_notification.Option.EXPIRED.value, @@ -188,38 +173,29 @@ def process_email( elif etype == "incorporationApplication" and option == "mras": email = mras_notification.process(email_msg["email"]) elif etype == "annualReport" and option == "reminder": - flag_on = flags.is_on("disable-specific-service-provider") - email = 
ar_reminder_notification.process( - email_msg["email"], token, flag_on) + flag_on = flags.value("disable-specific-service-provider") + email = ar_reminder_notification.process(email_msg["email"], token, flag_on) elif etype == "agmLocationChange" and option == Filing.Status.COMPLETED.value: - email = agm_location_change_notification.process( - email_msg["email"], token) + email = agm_location_change_notification.process(email_msg["email"], token) elif etype == "agmExtension" and option == Filing.Status.COMPLETED.value: - email = agm_extension_notification.process( - email_msg["email"], token) + email = agm_extension_notification.process(email_msg["email"], token) elif etype == "dissolution": email = dissolution_notification.process(email_msg["email"], token) elif etype == "registration": - email = registration_notification.process( - email_msg["email"], token) + email = registration_notification.process(email_msg["email"], token) elif etype == "restoration": email = restoration_notification.process(email_msg["email"], token) elif etype == "changeOfRegistration": - email = change_of_registration_notification.process( - email_msg["email"], token - ) + email = change_of_registration_notification.process(email_msg["email"], token) elif etype == "correction": email = correction_notification.process(email_msg["email"], token) elif etype == "consentContinuationOut": - email = consent_continuation_out_notification.process( - email_msg["email"], token - ) + email = consent_continuation_out_notification.process(email_msg["email"], token) elif etype == "continuationOut": - email = continuation_out_notification.process( - email_msg["email"], token) + email = continuation_out_notification.process(email_msg["email"], token) elif etype == "specialResolution": - email = special_resolution_notification.process( - email_msg["email"], token) + email = special_resolution_notification.process(email_msg["email"], token) + # pylint: disable-next=consider-iterating-dictionary elif etype in filing_notification.FILING_TYPE_CONVERTER.keys(): if etype == "annualReport" and option == Filing.Status.COMPLETED.value: return None @@ -233,6 +209,7 @@ def process_email( def send_email(email: dict, token: str): + """Send the email""" return requests.post( f'{current_app.get("NOTIFY_API_URL", "")}', json=email, diff --git a/queue_services/entity-emailer/src/entity_emailer/services/__init__.py b/queue_services/entity-emailer/src/entity_emailer/services/__init__.py index 19d346fa93..aeb71bc835 100644 --- a/queue_services/entity-emailer/src/entity_emailer/services/__init__.py +++ b/queue_services/entity-emailer/src/entity_emailer/services/__init__.py @@ -34,5 +34,4 @@ """This module contains all the services used.""" from .gcp_queue import GcpQueue - queue = GcpQueue() diff --git a/queue_services/entity-emailer/src/entity_emailer/services/gcp_queue.py b/queue_services/entity-emailer/src/entity_emailer/services/gcp_queue.py index 146824b964..806f144624 100644 --- a/queue_services/entity-emailer/src/entity_emailer/services/gcp_queue.py +++ b/queue_services/entity-emailer/src/entity_emailer/services/gcp_queue.py @@ -36,24 +36,29 @@ import base64 import json -from concurrent.futures import CancelledError from concurrent.futures import TimeoutError # pylint: disable=W0622 +from concurrent.futures import CancelledError from contextlib import suppress from typing import Optional from flask import Flask, current_app -from werkzeug.local import LocalProxy from google.auth import jwt from google.cloud import pubsub_v1 -from 
simple_cloudevent import CloudEventVersionException -from simple_cloudevent import InvalidCloudEventError -from simple_cloudevent import SimpleCloudEvent -from simple_cloudevent import from_queue_message -from simple_cloudevent import to_queue_message +from simple_cloudevent import ( + CloudEventVersionException, + InvalidCloudEventError, + SimpleCloudEvent, + from_queue_message, + to_queue_message, +) +from werkzeug.local import LocalProxy class GcpQueue: + """Provides Queue type services""" + def __init__(self, app: Flask = None): + """Initializes the GCP Queue class""" self.audience = None self.credentials_pub = None self.gcp_auth_key = None @@ -62,9 +67,10 @@ def __init__(self, app: Flask = None): self._publisher = None if app: - self.app_init(app) + self.init_app(app) def init_app(self, app: Flask): + """Initializes the application""" self.gcp_auth_key = app.config.get("GCP_AUTH_KEY") if self.gcp_auth_key: try: @@ -77,32 +83,24 @@ def init_app(self, app: Flask): "https://pubsub.googleapis.com/google.pubsub.v1.Publisher", ) - self.service_account_info = json.loads( - base64.b64decode(self.gcp_auth_key).decode("utf-8") - ) - credentials = jwt.Credentials.from_service_account_info( - self.service_account_info, audience=audience - ) - self.credentials_pub = credentials.with_claims( - audience=publisher_audience - ) + self.service_account_info = json.loads(base64.b64decode(self.gcp_auth_key).decode("utf-8")) + credentials = jwt.Credentials.from_service_account_info(self.service_account_info, audience=audience) + self.credentials_pub = credentials.with_claims(audience=publisher_audience) except Exception as error: # noqa: B902 - raise Exception( - "Unable to create a connection", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to create a connection", error) from error # pylint: disable=W0719 @property def publisher(self): + """Returns the publisher""" if not self._publisher and self.credentials_pub: - self._publisher = pubsub_v1.PublisherClient( - credentials=self.credentials_pub - ) + self._publisher = pubsub_v1.PublisherClient(credentials=self.credentials_pub) else: self._publisher = pubsub_v1.PublisherClient() return self.credentials_pub @staticmethod def is_valid_envelope(msg: dict): + """Checks if the envelope is valid""" if ( msg.get("subscription") and (message := msg.get("message")) @@ -114,17 +112,14 @@ def is_valid_envelope(msg: dict): @staticmethod def get_envelope(request: LocalProxy) -> Optional[dict]: + """Returns the envelope""" with suppress(Exception): - if (envelope := request.get_json()) and GcpQueue.is_valid_envelope( - envelope - ): + if (envelope := request.get_json()) and GcpQueue.is_valid_envelope(envelope): return envelope return None @staticmethod - def get_simple_cloud_event( - request: LocalProxy, return_raw: bool = False - ) -> type[SimpleCloudEvent | dict | None]: + def get_simple_cloud_event(request: LocalProxy, return_raw: bool = False) -> type[SimpleCloudEvent | dict | None]: """Return a SimpleCloudEvent if one is in session from the PubSub call. 
Parameters ------------ @@ -174,14 +169,14 @@ def publish(self, topic: str, payload: bytes): return future.result() except (CancelledError, TimeoutError) as error: - raise Exception( - "Unable to post to queue", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to post to queue", error) from error # pylint: disable=W0719 @staticmethod def to_queue_message(ce: SimpleCloudEvent): + """Return a byte string, of the CloudEvent in JSON format""" return to_queue_message(ce) @staticmethod def from_queue_message(data: dict): + """Convert a queue message back to a simple CloudEvent""" return from_queue_message(data) diff --git a/queue_services/entity-emailer/src/entity_emailer/services/logging.py b/queue_services/entity-emailer/src/entity_emailer/services/logging.py index 8fb46cbb79..a72b49e940 100644 --- a/queue_services/entity-emailer/src/entity_emailer/services/logging.py +++ b/queue_services/entity-emailer/src/entity_emailer/services/logging.py @@ -40,29 +40,28 @@ def structured_log(request: LocalProxy, severity: str = "NOTICE", message: str = None): + """Prints structured log message""" frm = inspect.stack()[1] mod = inspect.getmodule(frm[0]) # Build structured log messages as an object. global_log_fields = {} - if PROJECT := os.environ.get("GOOGLE_CLOUD_PROJECT"): + if project := os.environ.get("GOOGLE_CLOUD_PROJECT"): # Add log correlation to nest all log messages. trace_header = request.headers.get("X-Cloud-Trace-Context") - if trace_header and PROJECT: + if trace_header and project: trace = trace_header.split("/") - global_log_fields[ - "logging.googleapis.com/trace" - ] = f"projects/{PROJECT}/traces/{trace[0]}" + global_log_fields["logging.googleapis.com/trace"] = f"projects/{project}/traces/{trace[0]}" # Complete a structured log entry. - entry = dict( - severity=severity, - message=message, + entry = { + "severity": severity, + "message": message, # Log viewer accesses 'component' as jsonPayload.component'. 
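    # Illustrative note: when GOOGLE_CLOUD_PROJECT is set and the incoming request carries an
    # "X-Cloud-Trace-Context: <trace-id>/<span-id>;o=1" header, the entry printed below comes
    # out roughly as
    #   {"severity": "DEBUG", "message": "...", "component": "<module>.<function>",
    #    "logging.googleapis.com/trace": "projects/<project>/traces/<trace-id>"}
    # which lets Cloud Logging group the message with the request's trace.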
- component=f"{mod.__name__}.{frm.function}", + "component": f"{mod.__name__}.{frm.function}", **global_log_fields, - ) + } print(json.dumps(entry)) diff --git a/queue_services/entity-emailer/tests/__init__.py b/queue_services/entity-emailer/tests/__init__.py index 6e3b119ed0..ca02a01299 100644 --- a/queue_services/entity-emailer/tests/__init__.py +++ b/queue_services/entity-emailer/tests/__init__.py @@ -14,7 +14,6 @@ """The Test Suites to ensure that the service is built and operating correctly.""" import datetime - EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) diff --git a/queue_services/entity-emailer/tests/conftest.py b/queue_services/entity-emailer/tests/conftest.py index 54571cff98..65a038dcdc 100644 --- a/queue_services/entity-emailer/tests/conftest.py +++ b/queue_services/entity-emailer/tests/conftest.py @@ -20,8 +20,8 @@ import psycopg2 import pytest -from flask import Flask import sqlalchemy +from flask import Flask from sqlalchemy import event, text from entity_emailer import create_app @@ -67,9 +67,7 @@ def create_test_db( DATABASE_URI = DATABASE_URI[: DATABASE_URI.rfind("/")] + "/postgres" try: - with sqlalchemy.create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with sqlalchemy.create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(f"CREATE DATABASE {database}")) return True @@ -100,15 +98,12 @@ def drop_test_db( WHERE pg_stat_activity.datname = '{database}' AND pid <> pg_backend_pid(); """ - with contextlib.suppress( - sqlalchemy.exc.ProgrammingError, psycopg2.OperationalError, Exception - ): - with sqlalchemy.create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with contextlib.suppress(sqlalchemy.exc.ProgrammingError, psycopg2.OperationalError, Exception): + with sqlalchemy.create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(close_all)) conn.execute(text(f"DROP DATABASE {database}")) + @contextmanager def not_raises(exception): """Corallary to the pytest raises builtin. @@ -118,22 +113,23 @@ def not_raises(exception): try: yield except exception: - raise pytest.fail(f'DID RAISE {exception}') + raise pytest.fail(f"DID RAISE {exception}") # fixture to freeze utcnow to a fixed date-time @pytest.fixture def freeze_datetime_utcnow(monkeypatch): """Fixture to return a static time for utcnow().""" + class _Datetime: @classmethod def utcnow(cls): return FROZEN_DATETIME - monkeypatch.setattr(datetime, 'datetime', _Datetime) + monkeypatch.setattr(datetime, "datetime", _Datetime) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def app(): """Return a session-wide application configured in TEST mode.""" _app = create_app(Testing) @@ -147,13 +143,13 @@ def config(app): return app.config -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def client(app): # pylint: disable=redefined-outer-name """Return a session-wide Flask test client.""" return app.test_client() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def db(app): # pylint: disable=redefined-outer-name, invalid-name """Return a session-wide initialised database. 
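# Illustrative sketch of how these fixtures are consumed: the function-scoped `session` fixture
# (next hunk) wraps each test in a nested SAVEPOINT that is rolled back afterwards, so nothing a
# test writes persists between tests. The assertion mirrors the fixture's own "select 1" smoke
# check; any model- or data-specific usage would be an assumption beyond what this file shows.
from sqlalchemy import text


def test_session_fixture_rolls_back(session):
    """Each test runs inside a savepoint the fixture rolls back afterwards."""
    assert session.execute(text("select 1")).scalar() == 1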
@@ -178,7 +174,8 @@ def db(app): # pylint: disable=redefined-outer-name, invalid-name database_uri=app.config.get("SQLALCHEMY_DATABASE_URI"), ) -@pytest.fixture(scope='function') + +@pytest.fixture(scope="function") def session(app, db): # pylint: disable=redefined-outer-name, invalid-name """Return a function-scoped session.""" with app.app_context(): @@ -197,7 +194,7 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) sess.begin_nested() - @event.listens_for(sess(), 'after_transaction_end') + @event.listens_for(sess(), "after_transaction_end") def restart_savepoint(sess2, trans): # pylint: disable=unused-variable # Detecting whether this is indeed the nested transaction of the test if trans.nested and not trans._parent.nested: # pylint: disable=protected-access @@ -207,7 +204,7 @@ def restart_savepoint(sess2, trans): # pylint: disable=unused-variable db.session = sess - sql = text('select 1') + sql = text("select 1") sess.execute(sql) yield sess diff --git a/queue_services/entity-emailer/tests/pytest_marks.py b/queue_services/entity-emailer/tests/pytest_marks.py index dfce8b6ad5..843be7596a 100644 --- a/queue_services/entity-emailer/tests/pytest_marks.py +++ b/queue_services/entity-emailer/tests/pytest_marks.py @@ -17,10 +17,10 @@ import pytest from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) -colin_api_integration = pytest.mark.skipif((os.getenv('RUN_COLIN_API', False) is False), - reason='requires access to COLIN API') +colin_api_integration = pytest.mark.skipif( + (os.getenv("RUN_COLIN_API", False) is False), reason="requires access to COLIN API" +) diff --git a/queue_services/entity-emailer/tests/unit/__init__.py b/queue_services/entity-emailer/tests/unit/__init__.py index f4571169de..47c2e319c4 100644 --- a/queue_services/entity-emailer/tests/unit/__init__.py +++ b/queue_services/entity-emailer/tests/unit/__init__.py @@ -39,7 +39,7 @@ from random import randrange from unittest.mock import Mock -from legal_api.models import LegalEntity, Filing, RegistrationBootstrap, User +from legal_api.models import Filing, LegalEntity, RegistrationBootstrap, User from registry_schemas.example_data import ( AGM_EXTENSION, AGM_LOCATION_CHANGE, @@ -66,16 +66,15 @@ from tests import EPOCH_DATETIME - FILING_TYPE_MAPPER = { # annual report structure is different than other 2 - 'annualReport': ANNUAL_REPORT['filing']['annualReport'], - 'changeOfAddress': CORP_CHANGE_OF_ADDRESS, - 'changeOfDirectors': CHANGE_OF_DIRECTORS, - 'alteration': ALTERATION + "annualReport": ANNUAL_REPORT["filing"]["annualReport"], + "changeOfAddress": CORP_CHANGE_OF_ADDRESS, + "changeOfDirectors": CHANGE_OF_DIRECTORS, + "alteration": ALTERATION, } -LEGAL_NAME = 'test business' +LEGAL_NAME = "test business" def create_user(user_name: str): @@ -96,7 +95,9 @@ def create_legal_entity(identifier, legal_type=None, legal_name=None): return legal_entity -def create_filing(token=None, filing_json=None, legal_entity_id=None, filing_date=EPOCH_DATETIME, bootstrap_id: str = None): +def create_filing( + token=None, filing_json=None, legal_entity_id=None, filing_date=EPOCH_DATETIME, bootstrap_id: str = None +): """Return a test filing.""" filing = Filing() if token: @@ -116,29 +117,28 @@ def create_filing(token=None, filing_json=None, legal_entity_id=None, filing_dat def prep_incorp_filing(session, identifier, payment_id, 
option, legal_type=None): """Return a new incorp filing prepped for email notification.""" - legal_entity = create_legal_entity( - identifier, legal_type=legal_type, legal_name=LEGAL_NAME) + legal_entity = create_legal_entity(identifier, legal_type=legal_type, legal_name=LEGAL_NAME) filing_template = copy.deepcopy(INCORPORATION_FILING_TEMPLATE) - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier} + filing_template["filing"]["business"] = {"identifier": legal_entity.identifier} if legal_entity.legal_type: - filing_template['filing']['business']['legalType'] = legal_entity.legal_type - filing_template['filing']['incorporationApplication']['nameRequest']['legalType'] = legal_entity.legal_type - for party in filing_template['filing']['incorporationApplication']['parties']: - for role in party['roles']: - if role['roleType'] == 'Completing Party': - party['officer']['email'] = 'comp_party@email.com' - filing_template['filing']['incorporationApplication']['contactPoint']['email'] = 'test@test.com' - - temp_identifier = 'Tb31yQIuBw' + filing_template["filing"]["business"]["legalType"] = legal_entity.legal_type + filing_template["filing"]["incorporationApplication"]["nameRequest"]["legalType"] = legal_entity.legal_type + for party in filing_template["filing"]["incorporationApplication"]["parties"]: + for role in party["roles"]: + if role["roleType"] == "Completing Party": + party["officer"]["email"] = "comp_party@email.com" + filing_template["filing"]["incorporationApplication"]["contactPoint"]["email"] = "test@test.com" + + temp_identifier = "Tb31yQIuBw" temp_reg = RegistrationBootstrap() temp_reg._identifier = temp_identifier temp_reg.save() - filing = create_filing(token=payment_id, filing_json=filing_template, - legal_entity_id=legal_entity.id, bootstrap_id=temp_identifier) + filing = create_filing( + token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id, bootstrap_id=temp_identifier + ) filing.payment_completion_date = filing.filing_date filing.save() - if option in ['COMPLETED', 'bn']: + if option in ["COMPLETED", "bn"]: uow = versioning_manager.unit_of_work(session) transaction = uow.create_transaction(session) filing.transaction_id = transaction.id @@ -148,39 +148,31 @@ def prep_incorp_filing(session, identifier, payment_id, option, legal_type=None) def prep_registration_filing(session, identifier, payment_id, option, legal_type, legal_name): """Return a new registration filing prepped for email notification.""" - now = datetime.now().strftime('%Y-%m-%d') - REGISTRATION['business']['naics'] = { - 'naicsCode': '112320', - 'naicsDescription': 'Broiler and other meat-type chicken production' + now = datetime.now().strftime("%Y-%m-%d") + REGISTRATION["business"]["naics"] = { + "naicsCode": "112320", + "naicsDescription": "Broiler and other meat-type chicken production", } gp_registration = copy.deepcopy(FILING_HEADER) - gp_registration['filing']['header']['name'] = 'registration' - gp_registration['filing']['registration'] = copy.deepcopy(REGISTRATION) - gp_registration['filing']['registration']['startDate'] = now - gp_registration['filing']['registration']['nameRequest']['legalName'] = legal_name - gp_registration['filing']['registration']['parties'][1]['officer']['email'] = 'party@email.com' + gp_registration["filing"]["header"]["name"] = "registration" + gp_registration["filing"]["registration"] = copy.deepcopy(REGISTRATION) + gp_registration["filing"]["registration"]["startDate"] = now + 
gp_registration["filing"]["registration"]["nameRequest"]["legalName"] = legal_name + gp_registration["filing"]["registration"]["parties"][1]["officer"]["email"] = "party@email.com" sp_registration = copy.deepcopy(FILING_HEADER) - sp_registration['filing']['header']['name'] = 'registration' - sp_registration['filing']['registration'] = copy.deepcopy(REGISTRATION) - sp_registration['filing']['registration']['startDate'] = now - sp_registration['filing']['registration']['nameRequest']['legalType'] = 'SP' - sp_registration['filing']['registration']['nameRequest']['legalName'] = legal_name - sp_registration['filing']['registration']['businessType'] = 'SP' - sp_registration['filing']['registration']['parties'][0]['roles'] = [ - { - 'roleType': 'Completing Party', - 'appointmentDate': '2022-01-01' - - }, - { - 'roleType': 'Proprietor', - 'appointmentDate': '2022-01-01' - - } + sp_registration["filing"]["header"]["name"] = "registration" + sp_registration["filing"]["registration"] = copy.deepcopy(REGISTRATION) + sp_registration["filing"]["registration"]["startDate"] = now + sp_registration["filing"]["registration"]["nameRequest"]["legalType"] = "SP" + sp_registration["filing"]["registration"]["nameRequest"]["legalName"] = legal_name + sp_registration["filing"]["registration"]["businessType"] = "SP" + sp_registration["filing"]["registration"]["parties"][0]["roles"] = [ + {"roleType": "Completing Party", "appointmentDate": "2022-01-01"}, + {"roleType": "Proprietor", "appointmentDate": "2022-01-01"}, ] - del sp_registration['filing']['registration']['parties'][1] + del sp_registration["filing"]["registration"]["parties"][1] if legal_type == LegalEntity.EntityTypes.SOLE_PROP.value: filing_template = sp_registration @@ -188,24 +180,23 @@ def prep_registration_filing(session, identifier, payment_id, option, legal_type filing_template = gp_registration legal_entity_id = None - if option == 'PAID': - del filing_template['filing']['business'] - elif option == 'COMPLETED': + if option == "PAID": + del filing_template["filing"]["business"] + elif option == "COMPLETED": legal_entity = create_legal_entity(identifier, legal_type) legal_entity.founding_date = datetime.fromisoformat(now) legal_entity.save() legal_entity_id = legal_entity.id - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_entity.legal_type, - 'foundingDate': legal_entity.founding_date.isoformat() + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_entity.legal_type, + "foundingDate": legal_entity.founding_date.isoformat(), } - filing = create_filing( - token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity_id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity_id) filing.payment_completion_date = filing.filing_date filing.save() - if option in ['COMPLETED']: + if option in ["COMPLETED"]: uow = versioning_manager.unit_of_work(session) transaction = uow.create_transaction(session) filing.transaction_id = transaction.id @@ -217,32 +208,28 @@ def prep_dissolution_filing(session, identifier, payment_id, option, legal_type, """Return a new dissolution filing prepped for email notification.""" legal_entity = create_legal_entity(identifier, legal_type, legal_name) filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'dissolution' + filing_template["filing"]["header"]["name"] = "dissolution" if submitter_role: - 
filing_template['filing']['header'][ - 'documentOptionalEmail'] = f'{submitter_role}@email.com' - - filing_template['filing']['dissolution'] = copy.deepcopy(DISSOLUTION) - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["header"]["documentOptionalEmail"] = f"{submitter_role}@email.com" + + filing_template["filing"]["dissolution"] = copy.deepcopy(DISSOLUTION) + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_type, + "legalName": legal_name, } - for party in filing_template['filing']['dissolution']['parties']: - for role in party['roles']: - if role['roleType'] == 'Custodian': - party['officer']['email'] = 'custodian@email.com' - elif role['roleType'] == 'Completing Party': - party['officer']['email'] = 'cp@email.com' + for party in filing_template["filing"]["dissolution"]["parties"]: + for role in party["roles"]: + if role["roleType"] == "Custodian": + party["officer"]["email"] = "custodian@email.com" + elif role["roleType"] == "Completing Party": + party["officer"]["email"] = "cp@email.com" - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role @@ -255,26 +242,21 @@ def prep_consent_continuation_out_filing(session, identifier, payment_id, legal_ """Return a new consent continuation out filing prepped for email notification.""" legal_entity = create_legal_entity(identifier, legal_type, legal_name) filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'consentContinuationOut' + filing_template["filing"]["header"]["name"] = "consentContinuationOut" if submitter_role: - filing_template['filing']['header'][ - 'documentOptionalEmail'] = f'{submitter_role}@email.com' - - filing_template['filing']['consentContinuationOut'] = copy.deepcopy( - CONSENT_CONTINUATION_OUT) - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["header"]["documentOptionalEmail"] = f"{submitter_role}@email.com" + + filing_template["filing"]["consentContinuationOut"] = copy.deepcopy(CONSENT_CONTINUATION_OUT) + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_type, + "legalName": legal_name, } - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role @@ -287,26 +269,21 @@ def prep_continuation_out_filing(session, identifier, payment_id, legal_type, le """Return a new continuation out filing prepped for email notification.""" legal_entity = create_legal_entity(identifier, legal_type, legal_name) filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'continuationOut' + 
filing_template["filing"]["header"]["name"] = "continuationOut" if submitter_role: - filing_template['filing']['header'][ - 'documentOptionalEmail'] = f'{submitter_role}@email.com' - - filing_template['filing']['continuationOut'] = copy.deepcopy( - CONTINUATION_OUT) - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["header"]["documentOptionalEmail"] = f"{submitter_role}@email.com" + + filing_template["filing"]["continuationOut"] = copy.deepcopy(CONTINUATION_OUT) + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_type, + "legalName": legal_name, } - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role @@ -315,7 +292,7 @@ def prep_continuation_out_filing(session, identifier, payment_id, legal_type, le return filing -def prep_restoration_filing(identifier, payment_id, legal_type, legal_name, r_type='fullRestoration'): +def prep_restoration_filing(identifier, payment_id, legal_type, legal_name, r_type="fullRestoration"): """Return a new restoration filing prepped for email notification. @param r_type: @@ -327,22 +304,19 @@ def prep_restoration_filing(identifier, payment_id, legal_type, legal_name, r_ty """ legal_entity = create_legal_entity(identifier, legal_type, legal_name) filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'restoration' - filing_template['filing']['restoration'] = copy.deepcopy(RESTORATION) - filing_template['filing']['restoration']['type'] = r_type - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["header"]["name"] = "restoration" + filing_template["filing"]["restoration"] = copy.deepcopy(RESTORATION) + filing_template["filing"]["restoration"]["type"] = r_type + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_type, + "legalName": legal_name, } - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id filing.save() @@ -354,50 +328,36 @@ def prep_change_of_registration_filing(session, identifier, payment_id, legal_ty legal_entity = create_legal_entity(identifier, legal_type, legal_name) gp_change_of_registration = copy.deepcopy(FILING_HEADER) - gp_change_of_registration['filing']['header']['name'] = 'changeOfRegistration' - gp_change_of_registration['filing']['changeOfRegistration'] = copy.deepcopy( - CHANGE_OF_REGISTRATION) - gp_change_of_registration['filing']['changeOfRegistration']['parties'][0]['officer']['email'] = 'party@email.com' + gp_change_of_registration["filing"]["header"]["name"] = "changeOfRegistration" + gp_change_of_registration["filing"]["changeOfRegistration"] = copy.deepcopy(CHANGE_OF_REGISTRATION) + 
gp_change_of_registration["filing"]["changeOfRegistration"]["parties"][0]["officer"]["email"] = "party@email.com" sp_change_of_registration = copy.deepcopy(FILING_HEADER) - sp_change_of_registration['filing']['header']['name'] = 'changeOfRegistration' - sp_change_of_registration['filing']['changeOfRegistration'] = copy.deepcopy( - CHANGE_OF_REGISTRATION) - sp_change_of_registration['filing']['changeOfRegistration']['parties'][0]['roles'] = [ - { - 'roleType': 'Completing Party', - 'appointmentDate': '2022-01-01' - - }, - { - 'roleType': 'Proprietor', - 'appointmentDate': '2022-01-01' - - } + sp_change_of_registration["filing"]["header"]["name"] = "changeOfRegistration" + sp_change_of_registration["filing"]["changeOfRegistration"] = copy.deepcopy(CHANGE_OF_REGISTRATION) + sp_change_of_registration["filing"]["changeOfRegistration"]["parties"][0]["roles"] = [ + {"roleType": "Completing Party", "appointmentDate": "2022-01-01"}, + {"roleType": "Proprietor", "appointmentDate": "2022-01-01"}, ] - sp_change_of_registration['filing']['changeOfRegistration']['parties'][0]['officer']['email'] = 'party@email.com' + sp_change_of_registration["filing"]["changeOfRegistration"]["parties"][0]["officer"]["email"] = "party@email.com" if legal_type == LegalEntity.EntityTypes.SOLE_PROP.value: filing_template = sp_change_of_registration elif legal_type == LegalEntity.EntityTypes.PARTNERSHIP.value: filing_template = gp_change_of_registration - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_type, + "legalName": legal_name, } if submitter_role: - filing_template['filing']['header'][ - 'documentOptionalEmail'] = f'{submitter_role}@email.com' + filing_template["filing"]["header"]["documentOptionalEmail"] = f"{submitter_role}@email.com" - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role @@ -409,13 +369,15 @@ def prep_change_of_registration_filing(session, identifier, payment_id, legal_ty def prep_alteration_filing(session, identifier, option, company_name): """Return an alteration filing prepped for email notification.""" legal_entity = create_legal_entity( - identifier, legal_type=LegalEntity.EntityTypes.BCOMP.value, legal_name=company_name) + identifier, legal_type=LegalEntity.EntityTypes.BCOMP.value, legal_name=company_name + ) filing_template = copy.deepcopy(ALTERATION_FILING_TEMPLATE) - filing_template['filing']['business'] = \ - {'identifier': f'{identifier}', - 'legalype': LegalEntity.EntityTypes.BCOMP.value, 'legalName': company_name} - filing = create_filing(filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing_template["filing"]["business"] = { + "identifier": f"{identifier}", + "legalype": LegalEntity.EntityTypes.BCOMP.value, + "legalName": company_name, + } + filing = create_filing(filing_json=filing_template, legal_entity_id=legal_entity.id) filing.save() return filing @@ -425,23 +387,19 @@ def prep_agm_location_change_filing(identifier, payment_id, legal_type, legal_na """Return a new AGM location change filing prepped for 
email notification.""" business = create_legal_entity(identifier, legal_type, legal_name) filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'agmLocationChange' - - filing_template['filing']['agmLocationChange'] = copy.deepcopy( - AGM_LOCATION_CHANGE) - filing_template['filing']['business'] = { - 'identifier': business.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["header"]["name"] = "agmLocationChange" + + filing_template["filing"]["agmLocationChange"] = copy.deepcopy(AGM_LOCATION_CHANGE) + filing_template["filing"]["business"] = { + "identifier": business.identifier, + "legalType": legal_type, + "legalName": legal_name, } - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=business.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=business.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id filing.save() @@ -452,22 +410,19 @@ def prep_agm_extension_filing(identifier, payment_id, legal_type, legal_name): """Return a new AGM extension filing prepped for email notification.""" business = create_legal_entity(identifier, legal_type, legal_name) filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'agmExtension' + filing_template["filing"]["header"]["name"] = "agmExtension" - filing_template['filing']['agmExtension'] = copy.deepcopy(AGM_EXTENSION) - filing_template['filing']['business'] = { - 'identifier': business.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["agmExtension"] = copy.deepcopy(AGM_EXTENSION) + filing_template["filing"]["business"] = { + "identifier": business.identifier, + "legalType": legal_type, + "legalName": legal_name, } - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=business.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=business.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id filing.save() @@ -476,29 +431,27 @@ def prep_agm_extension_filing(identifier, payment_id, legal_type, legal_name): def prep_maintenance_filing(session, identifier, payment_id, status, filing_type, submitter_role=None): """Return a new maintenance filing prepped for email notification.""" - legal_entity = create_legal_entity( - identifier, LegalEntity.EntityTypes.BCOMP.value, LEGAL_NAME) + legal_entity = create_legal_entity(identifier, LegalEntity.EntityTypes.BCOMP.value, LEGAL_NAME) filing_template = copy.deepcopy(FILING_TEMPLATE) - filing_template['filing']['header']['name'] = filing_type - filing_template['filing']['business'] = \ - {'identifier': f'{identifier}', - 'legalype': LegalEntity.EntityTypes.BCOMP.value, 'legalName': LEGAL_NAME} - filing_template['filing'][filing_type] = copy.deepcopy( - FILING_TYPE_MAPPER[filing_type]) + filing_template["filing"]["header"]["name"] = filing_type + filing_template["filing"]["business"] = { + "identifier": f"{identifier}", + "legalype": LegalEntity.EntityTypes.BCOMP.value, + "legalName": LEGAL_NAME, + } + filing_template["filing"][filing_type] = copy.deepcopy(FILING_TYPE_MAPPER[filing_type]) if submitter_role: - filing_template['filing']['header'][ - 'documentOptionalEmail'] = 
f'{submitter_role}@email.com' - filing = create_filing( - token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) + filing_template["filing"]["header"]["documentOptionalEmail"] = f"{submitter_role}@email.com" + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role filing.save() - if status == 'COMPLETED': + if status == "COMPLETED": uow = versioning_manager.unit_of_work(session) transaction = uow.create_transaction(session) filing.transaction_id = transaction.id @@ -510,19 +463,17 @@ def prep_maintenance_filing(session, identifier, payment_id, status, filing_type def prep_incorporation_correction_filing(session, legal_entity, original_filing_id, payment_id, option): """Return a new incorporation correction filing prepped for email notification.""" filing_template = copy.deepcopy(CORRECTION_INCORPORATION) - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier} - for party in filing_template['filing']['correction']['parties']: - for role in party['roles']: - if role['roleType'] == 'Completing Party': - party['officer']['email'] = 'comp_party@email.com' - filing_template['filing']['correction']['contactPoint']['email'] = 'test@test.com' - filing_template['filing']['correction']['correctedFilingId'] = original_filing_id - filing = create_filing( - token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) + filing_template["filing"]["business"] = {"identifier": legal_entity.identifier} + for party in filing_template["filing"]["correction"]["parties"]: + for role in party["roles"]: + if role["roleType"] == "Completing Party": + party["officer"]["email"] = "comp_party@email.com" + filing_template["filing"]["correction"]["contactPoint"]["email"] = "test@test.com" + filing_template["filing"]["correction"]["correctedFilingId"] = original_filing_id + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date filing.save() - if option in ['COMPLETED']: + if option in ["COMPLETED"]: uow = versioning_manager.unit_of_work(session) transaction = uow.create_transaction(session) filing.transaction_id = transaction.id @@ -535,42 +486,31 @@ def prep_firm_correction_filing(session, identifier, payment_id, legal_type, leg legal_entity = create_legal_entity(identifier, legal_type, legal_name) gp_correction = copy.deepcopy(CORRECTION_REGISTRATION) - gp_correction['filing']['correction']['parties'][0]['officer']['email'] = 'party@email.com' + gp_correction["filing"]["correction"]["parties"][0]["officer"]["email"] = "party@email.com" sp_correction = copy.deepcopy(CORRECTION_REGISTRATION) - sp_correction['filing']['correction']['parties'][0]['officer']['email'] = 'party@email.com' - sp_correction['filing']['correction']['parties'][0]['roles'] = [ - { - 'roleType': 'Completing Party', - 'appointmentDate': '2022-01-01' - - }, - { - 'roleType': 'Proprietor', - 'appointmentDate': '2022-01-01' - - } + sp_correction["filing"]["correction"]["parties"][0]["officer"]["email"] = "party@email.com" + sp_correction["filing"]["correction"]["parties"][0]["roles"] = [ + {"roleType": "Completing Party", "appointmentDate": "2022-01-01"}, + {"roleType": "Proprietor", "appointmentDate": "2022-01-01"}, ] - 
sp_correction['filing']['correction']['parties'][0]['officer']['email'] = 'party@email.com' + sp_correction["filing"]["correction"]["parties"][0]["officer"]["email"] = "party@email.com" if legal_type == LegalEntity.EntityTypes.SOLE_PROP.value: filing_template = sp_correction elif legal_type == LegalEntity.EntityTypes.PARTNERSHIP.value: filing_template = gp_correction - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier, - 'legalType': legal_type, - 'legalName': legal_name + filing_template["filing"]["business"] = { + "identifier": legal_entity.identifier, + "legalType": legal_type, + "legalName": legal_name, } - filing = create_filing( - token=payment_id, - filing_json=filing_template, - legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date - user = create_user('test_user') + user = create_user("test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role @@ -581,29 +521,24 @@ def prep_firm_correction_filing(session, identifier, payment_id, legal_type, leg def prep_cp_special_resolution_filing(identifier, payment_id, legal_type, legal_name, submitter_role=None): """Return a new cp special resolution out filing prepped for email notification.""" - legal_entity = create_legal_entity( - identifier, legal_type=legal_type, legal_name=legal_name) + legal_entity = create_legal_entity(identifier, legal_type=legal_type, legal_name=legal_name) filing_template = copy.deepcopy(CP_SPECIAL_RESOLUTION_TEMPLATE) - filing_template['filing']['business'] = \ - {'identifier': f'{identifier}', 'legalype': legal_type, 'legalName': legal_name} - filing_template['filing']['alteration'] = { - 'business': { - 'identifier': 'BC1234567', - 'legalType': 'BEN' - }, - 'contactPoint': { - 'email': 'joe@email.com' - }, - 'rulesInResolution': True, - 'rulesFileKey': 'cooperative/a8abe1a6-4f45-4105-8a05-822baee3b743.pdf' + filing_template["filing"]["business"] = { + "identifier": f"{identifier}", + "legalype": legal_type, + "legalName": legal_name, + } + filing_template["filing"]["alteration"] = { + "business": {"identifier": "BC1234567", "legalType": "BEN"}, + "contactPoint": {"email": "joe@email.com"}, + "rulesInResolution": True, + "rulesFileKey": "cooperative/a8abe1a6-4f45-4105-8a05-822baee3b743.pdf", } if submitter_role: - filing_template['filing']['header'][ - 'documentOptionalEmail'] = f'{submitter_role}@email.com' - filing = create_filing( - token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) + filing_template["filing"]["header"]["documentOptionalEmail"] = f"{submitter_role}@email.com" + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) - user = create_user('cp_test_user') + user = create_user("cp_test_user") filing.submitter_id = user.id if submitter_role: filing.submitter_roles = submitter_role @@ -611,32 +546,29 @@ def prep_cp_special_resolution_filing(identifier, payment_id, legal_type, legal_ return filing -def prep_cp_special_resolution_correction_filing(session, legal_entity, original_filing_id, - payment_id, option, corrected_filing_type): +def prep_cp_special_resolution_correction_filing( + session, legal_entity, original_filing_id, payment_id, option, corrected_filing_type +): """Return a cp special resolution correction filing prepped for email notification.""" filing_template = copy.deepcopy(FILING_HEADER) - 
filing_template['filing']['header']['name'] = 'correction' - filing_template['filing']['correction'] = copy.deepcopy( - CORRECTION_CP_SPECIAL_RESOLUTION) - filing_template['filing']['business'] = { - 'identifier': legal_entity.identifier} - filing_template['filing']['correction']['contactPoint']['email'] = 'cp_sr@test.com' - filing_template['filing']['correction']['correctedFilingId'] = original_filing_id - filing_template['filing']['correction']['correctedFilingType'] = corrected_filing_type - filing_template['filing']['correction']['nameRequest'] = { - 'nrNumber': 'NR 8798956', - 'legalName': 'HAULER MEDIA INC.', - 'legalType': 'BC', - 'requestType': 'CHG' + filing_template["filing"]["header"]["name"] = "correction" + filing_template["filing"]["correction"] = copy.deepcopy(CORRECTION_CP_SPECIAL_RESOLUTION) + filing_template["filing"]["business"] = {"identifier": legal_entity.identifier} + filing_template["filing"]["correction"]["contactPoint"]["email"] = "cp_sr@test.com" + filing_template["filing"]["correction"]["correctedFilingId"] = original_filing_id + filing_template["filing"]["correction"]["correctedFilingType"] = corrected_filing_type + filing_template["filing"]["correction"]["nameRequest"] = { + "nrNumber": "NR 8798956", + "legalName": "HAULER MEDIA INC.", + "legalType": "BC", + "requestType": "CHG", } - filing = create_filing( - token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=legal_entity.id) filing.payment_completion_date = filing.filing_date # Triggered from the filer. - filing._meta_data = {'correction': { - 'uploadNewRules': True, 'toLegalName': True}} + filing._meta_data = {"correction": {"uploadNewRules": True, "toLegalName": True}} filing.save() - if option in ['COMPLETED']: + if option in ["COMPLETED"]: uow = versioning_manager.unit_of_work(session) transaction = uow.create_transaction(session) filing.transaction_id = transaction.id @@ -644,29 +576,26 @@ def prep_cp_special_resolution_correction_filing(session, legal_entity, original return filing -def prep_cp_special_resolution_correction_upload_memorandum_filing(session, business, - original_filing_id, - payment_id, option, - corrected_filing_type): +def prep_cp_special_resolution_correction_upload_memorandum_filing( + session, business, original_filing_id, payment_id, option, corrected_filing_type +): """Return a cp special resolution correction filing prepped for email notification.""" filing_template = copy.deepcopy(FILING_HEADER) - filing_template['filing']['header']['name'] = 'correction' - filing_template['filing']['correction'] = copy.deepcopy( - CORRECTION_CP_SPECIAL_RESOLUTION) - filing_template['filing']['business'] = {'identifier': business.identifier} - filing_template['filing']['correction']['contactPoint']['email'] = 'cp_sr@test.com' - filing_template['filing']['correction']['correctedFilingId'] = original_filing_id - filing_template['filing']['correction']['correctedFilingType'] = corrected_filing_type - del filing_template['filing']['correction']['resolution'] - filing_template['filing']['correction']['memorandumFileKey'] = '28f73dc4-8e7c-4c89-bef6-a81dff909ca6.pdf' - filing_template['filing']['correction']['memorandumFileName'] = 'test.pdf' - filing = create_filing( - token=payment_id, filing_json=filing_template, business_id=business.id) + filing_template["filing"]["header"]["name"] = "correction" + filing_template["filing"]["correction"] = 
copy.deepcopy(CORRECTION_CP_SPECIAL_RESOLUTION) + filing_template["filing"]["business"] = {"identifier": business.identifier} + filing_template["filing"]["correction"]["contactPoint"]["email"] = "cp_sr@test.com" + filing_template["filing"]["correction"]["correctedFilingId"] = original_filing_id + filing_template["filing"]["correction"]["correctedFilingType"] = corrected_filing_type + del filing_template["filing"]["correction"]["resolution"] + filing_template["filing"]["correction"]["memorandumFileKey"] = "28f73dc4-8e7c-4c89-bef6-a81dff909ca6.pdf" + filing_template["filing"]["correction"]["memorandumFileName"] = "test.pdf" + filing = create_filing(token=payment_id, filing_json=filing_template, legal_entity_id=business.id) filing.payment_completion_date = filing.filing_date # Triggered from the filer. - filing._meta_data = {'correction': {'uploadNewMemorandum': True}} + filing._meta_data = {"correction": {"uploadNewMemorandum": True}} filing.save() - if option in ['COMPLETED']: + if option in ["COMPLETED"]: uow = versioning_manager.unit_of_work(session) transaction = uow.create_transaction(session) filing.transaction_id = transaction.id @@ -703,7 +632,7 @@ def nested_session(session): sess = session.begin_nested() yield sess sess.rollback() - except: + except: # noqa: E722 pass finally: pass diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_affiliation_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_affiliation_notification.py index bfea6d4f76..a0ed1cdb84 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_affiliation_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_affiliation_notification.py @@ -20,28 +20,24 @@ def test_notifications(app, session): """Assert Affiliation notification is created.""" - subject = 'How to use BCRegistry.ca' - company_name = 'Company Name' - testing_email = 'test@test.com' - token = 'token' - filing = prep_alteration_filing(session, 'BC1234567', 'DRAFT', company_name) + subject = "How to use BCRegistry.ca" + company_name = "Company Name" + testing_email = "test@test.com" + token = "token" + filing = prep_alteration_filing(session, "BC1234567", "DRAFT", company_name) # test processor - with patch.object(affiliation_notification, 'get_recipients', return_value=testing_email): + with patch.object(affiliation_notification, "get_recipients", return_value=testing_email): email = affiliation_notification.process( { - 'data': { - 'filing': { - 'header': {'filingId': filing.id} - } - }, - 'type': 'bc.registry.affiliation', - 'identifier': 'BC1234567' + "data": {"filing": {"header": {"filingId": filing.id}}}, + "type": "bc.registry.affiliation", + "identifier": "BC1234567", }, - token + token, ) - assert email['content']['subject'] == company_name + ' - ' + subject + assert email["content"]["subject"] == company_name + " - " + subject - assert testing_email in email['recipients'] - assert email['content']['body'] + assert testing_email in email["recipients"] + assert email["content"]["body"] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_agm_extension_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_agm_extension_notification.py index ed10100ab5..e4e3a4b92c 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_agm_extension_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_agm_extension_notification.py @@ -21,29 +21,34 @@ from tests.unit 
import prep_agm_extension_filing -@pytest.mark.parametrize("status,legal_name,is_numbered", [ - ("COMPLETED", "test business", False), - ("COMPLETED", "BC1234567", True), -]) +@pytest.mark.parametrize( + "status,legal_name,is_numbered", + [ + ("COMPLETED", "test business", False), + ("COMPLETED", "BC1234567", True), + ], +) def test_agm_extension_notification(app, session, status, legal_name, is_numbered): """Assert that the agm extension email processor works as expected.""" # setup filing + business for email - filing = prep_agm_extension_filing( - "BC1234567", "1", LegalEntity.EntityTypes.COMP.value, legal_name) + filing = prep_agm_extension_filing("BC1234567", "1", LegalEntity.EntityTypes.COMP.value, legal_name) token = "token" # test processor with patch.object(agm_extension_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: - with patch.object(agm_extension_notification, "get_recipient_from_auth", - return_value="recipient@email.com"): + with patch.object(agm_extension_notification, "get_recipient_from_auth", return_value="recipient@email.com"): email = agm_extension_notification.process( - {"filingId": filing.id, "type": "agmExtension", "option": status}, token) + {"filingId": filing.id, "type": "agmExtension", "option": status}, token + ) - if (is_numbered): - assert email["content"]["subject"] == \ - "Numbered Company - AGM Extension Documents from the Business Registry" + if is_numbered: + assert ( + email["content"]["subject"] + == "Numbered Company - AGM Extension Documents from the Business Registry" + ) else: - assert email["content"]["subject"] == \ - legal_name + " - AGM Extension Documents from the Business Registry" + assert ( + email["content"]["subject"] == legal_name + " - AGM Extension Documents from the Business Registry" + ) assert "recipient@email.com" in email["recipients"] assert email["content"]["body"] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_agm_location_change_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_agm_location_change_notification.py index 4bf3201f4f..dc9ded127d 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_agm_location_change_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_agm_location_change_notification.py @@ -21,29 +21,37 @@ from tests.unit import prep_agm_location_change_filing -@pytest.mark.parametrize("status,legal_name,is_numbered", [ - ("COMPLETED", "test business", False), - ("COMPLETED", "BC1234567", True), -]) +@pytest.mark.parametrize( + "status,legal_name,is_numbered", + [ + ("COMPLETED", "test business", False), + ("COMPLETED", "BC1234567", True), + ], +) def test_agm_location_change_notification(app, session, status, legal_name, is_numbered): """Assert that the agm location change email processor works as expected.""" # setup filing + business for email - filing = prep_agm_location_change_filing( - "BC1234567", "1", LegalEntity.EntityTypes.COMP.value, legal_name) + filing = prep_agm_location_change_filing("BC1234567", "1", LegalEntity.EntityTypes.COMP.value, legal_name) token = "token" # test processor with patch.object(agm_location_change_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: - with patch.object(agm_location_change_notification, "get_recipient_from_auth", - return_value="recipient@email.com"): + with patch.object( + agm_location_change_notification, "get_recipient_from_auth", return_value="recipient@email.com" + ): email = agm_location_change_notification.process( - 
{"filingId": filing.id, "type": "agmLocationChange", "option": status}, token) + {"filingId": filing.id, "type": "agmLocationChange", "option": status}, token + ) - if (is_numbered): - assert email["content"]["subject"] == \ - "Numbered Company - AGM Location Change Documents from the Business Registry" + if is_numbered: + assert ( + email["content"]["subject"] + == "Numbered Company - AGM Location Change Documents from the Business Registry" + ) else: - assert email["content"]["subject"] == \ - legal_name + " - AGM Location Change Documents from the Business Registry" + assert ( + email["content"]["subject"] + == legal_name + " - AGM Location Change Documents from the Business Registry" + ) assert "recipient@email.com" in email["recipients"] assert email["content"]["body"] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_ar_reminder_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_ar_reminder_notification.py index 3e7681e177..d6aec8e35e 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_ar_reminder_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_ar_reminder_notification.py @@ -30,14 +30,20 @@ def test_ar_reminder_notification(app, session): token = "token" flag_on = False # test processor - with patch.object(ar_reminder_notification, "get_recipient_from_auth", return_value="test@test.com") \ - as mock_get_recipient_from_auth: + with patch.object( + ar_reminder_notification, "get_recipient_from_auth", return_value="test@test.com" + ) as mock_get_recipient_from_auth: email = ar_reminder_notification.process( { "businessId": filing.business_id, - "type": "annualReport", "option": "reminder", - "arFee": "100", "arYear": 2021 - }, token, flag_on) + "type": "annualReport", + "option": "reminder", + "arFee": "100", + "arYear": 2021, + }, + token, + flag_on, + ) assert email["content"]["subject"] == "test business 2021 Annual Report Reminder" assert "test@test.com" in email["recipients"] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_bn_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_bn_notification.py index 6993545e26..8c43cbc0aa 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_bn_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_bn_notification.py @@ -23,45 +23,44 @@ def test_bn_notificaton(app, session): """Assert that the bn email processor builds the email correctly.""" # setup filing + business for email - identifier = 'BC1234567' - filing = prep_incorp_filing(session, identifier, '1', 'bn') + identifier = "BC1234567" + filing = prep_incorp_filing(session, identifier, "1", "bn") business = LegalEntity.find_by_identifier(identifier) # sanity check assert filing.id assert business.id # run processor email = bn_notification.process( - {'filingId': None, 'type': 'businessNumber', 'option': 'bn', 'identifier': 'BC1234567'}) + {"filingId": None, "type": "businessNumber", "option": "bn", "identifier": "BC1234567"} + ) # check email values - assert 'comp_party@email.com' in email['recipients'] - assert 'test@test.com' in email['recipients'] - assert email['content']['subject'] == f'{business.legal_name} - Business Number Information' - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert "comp_party@email.com" in email["recipients"] + assert "test@test.com" in email["recipients"] + assert email["content"]["subject"] == 
f"{business.legal_name} - Business Number Information" + assert email["content"]["body"] + assert email["content"]["attachments"] == [] def test_bn_move_notificaton(app, session): """Assert that the bn move email processor builds the email correctly.""" # setup filing + business for email - identifier = 'FM1234567' - filing = prep_registration_filing(session, identifier, '1', 'COMPLETED', - LegalEntity.EntityTypes.SOLE_PROP.value, 'test business') - token = 'token' + identifier = "FM1234567" + filing = prep_registration_filing( + session, identifier, "1", "COMPLETED", LegalEntity.EntityTypes.SOLE_PROP.value, "test business" + ) + token = "token" business = LegalEntity.find_by_identifier(identifier) # sanity check assert filing.id assert business.id # run processor - with patch.object(bn_notification, 'get_recipient_from_auth', return_value='user@email.com'): - email = bn_notification.process_bn_move({'identifier': identifier, - 'data': { - 'oldBn': '993775204BC0001', - 'newBn': '993777399BC0001' - }}, - token) + with patch.object(bn_notification, "get_recipient_from_auth", return_value="user@email.com"): + email = bn_notification.process_bn_move( + {"identifier": identifier, "data": {"oldBn": "993775204BC0001", "newBn": "993777399BC0001"}}, token + ) # check email values - assert 'user@email.com' in email['recipients'] - assert email['content']['subject'] == f'{business.legal_name} - Business Number Changed' - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert "user@email.com" in email["recipients"] + assert email["content"]["subject"] == f"{business.legal_name} - Business Number Changed" + assert email["content"]["body"] + assert email["content"]["attachments"] == [] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_change_of_registration_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_change_of_registration_notification.py index 68d94202fc..d046a37237 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_change_of_registration_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_change_of_registration_notification.py @@ -21,50 +21,56 @@ from tests.unit import prep_change_of_registration_filing -@pytest.mark.parametrize('status,legal_type,submitter_role', [ - ('PAID', LegalEntity.EntityTypes.SOLE_PROP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.SOLE_PROP.value, None), - ('PAID', LegalEntity.EntityTypes.PARTNERSHIP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.PARTNERSHIP.value, None), - - ('PAID', LegalEntity.EntityTypes.SOLE_PROP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.SOLE_PROP.value, 'staff'), - ('PAID', LegalEntity.EntityTypes.PARTNERSHIP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.PARTNERSHIP.value, 'staff'), -]) +@pytest.mark.parametrize( + "status,legal_type,submitter_role", + [ + ("PAID", LegalEntity.EntityTypes.SOLE_PROP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.SOLE_PROP.value, None), + ("PAID", LegalEntity.EntityTypes.PARTNERSHIP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.PARTNERSHIP.value, None), + ("PAID", LegalEntity.EntityTypes.SOLE_PROP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.SOLE_PROP.value, "staff"), + ("PAID", LegalEntity.EntityTypes.PARTNERSHIP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.PARTNERSHIP.value, "staff"), + ], +) def test_change_of_registration_notification(app, session, mocker, status, legal_type, 
submitter_role): """Assert that email attributes are correct.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_change_of_registration_filing(session, 'FM1234567', '1', legal_type, legal_name, submitter_role) - token = 'token' + legal_name = "test business" + filing = prep_change_of_registration_filing(session, "FM1234567", "1", legal_type, legal_name, submitter_role) + token = "token" # test processor mocker.patch( - 'entity_emailer.email_processors.change_of_registration_notification.get_user_email_from_auth', - return_value='user@email.com') - with patch.object(change_of_registration_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: + "entity_emailer.email_processors.change_of_registration_notification.get_user_email_from_auth", + return_value="user@email.com", + ) + with patch.object(change_of_registration_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: email = change_of_registration_notification.process( - {'filingId': filing.id, 'type': 'changeOfRegistration', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + ' - Confirmation of Filing from the Business Registry' + {"filingId": filing.id, "type": "changeOfRegistration", "option": status}, token + ) + if status == "PAID": + assert email["content"]["subject"] == legal_name + " - Confirmation of Filing from the Business Registry" else: - assert email['content']['subject'] == \ - legal_name + ' - Change of Registration Documents from the Business Registry' + assert ( + email["content"]["subject"] + == legal_name + " - Change of Registration Documents from the Business Registry" + ) if submitter_role: - assert f'{submitter_role}@email.com' in email['recipients'] + assert f"{submitter_role}@email.com" in email["recipients"] else: - assert 'user@email.com' in email['recipients'] + assert "user@email.com" in email["recipients"] - if status == 'COMPLETED': - assert 'no_one@never.get' in email['recipients'] + if status == "COMPLETED": + assert "no_one@never.get" in email["recipients"] if legal_type == LegalEntity.EntityTypes.PARTNERSHIP.value: - assert 'party@email.com' in email['recipients'] + assert "party@email.com" in email["recipients"] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - if status == 'COMPLETED': - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'FM1234567' + if status == "COMPLETED": + assert mock_get_pdfs.call_args[0][2]["identifier"] == "FM1234567" assert mock_get_pdfs.call_args[0][3] == filing diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_consent_continuation_out_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_consent_continuation_out_notification.py index efc6007475..5930a242cd 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_consent_continuation_out_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_consent_continuation_out_notification.py @@ -21,38 +21,42 @@ from tests.unit import prep_consent_continuation_out_filing -@pytest.mark.parametrize('status,legal_type,submitter_role', [ - ('COMPLETED', LegalEntity.EntityTypes.COMP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BCOMP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BC_CCC.value, None), - ('COMPLETED', 
LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), - ('COMPLETED', LegalEntity.EntityTypes.COMP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.BCOMP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.BC_CCC.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.BC_ULC_COMPANY.value, 'staff') -]) +@pytest.mark.parametrize( + "status,legal_type,submitter_role", + [ + ("COMPLETED", LegalEntity.EntityTypes.COMP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BCOMP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BC_CCC.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), + ("COMPLETED", LegalEntity.EntityTypes.COMP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.BCOMP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.BC_CCC.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.BC_ULC_COMPANY.value, "staff"), + ], +) def test_consent_continuation_out_notification(app, session, status, legal_type, submitter_role): """Assert that the consent_continuation_out email processor for corps works as expected.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_consent_continuation_out_filing(session, 'BC1234567', '1', legal_type, legal_name, submitter_role) - token = 'token' + legal_name = "test business" + filing = prep_consent_continuation_out_filing(session, "BC1234567", "1", legal_type, legal_name, submitter_role) + token = "token" # test processor - with patch.object(consent_continuation_out_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(consent_continuation_out_notification, 'get_recipient_from_auth', - return_value='recipient@email.com'): + with patch.object(consent_continuation_out_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object( + consent_continuation_out_notification, "get_recipient_from_auth", return_value="recipient@email.com" + ): email = consent_continuation_out_notification.process( - {'filingId': filing.id, 'type': 'consentContinuationOut', 'option': status}, token) - assert email['content']['subject'] == \ - legal_name + ' - Confirmation of Filing from the Business Registry' + {"filingId": filing.id, "type": "consentContinuationOut", "option": status}, token + ) + assert email["content"]["subject"] == legal_name + " - Confirmation of Filing from the Business Registry" if submitter_role: - assert f'{submitter_role}@email.com' in email['recipients'] - assert 'recipient@email.com' in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert f"{submitter_role}@email.com" in email["recipients"] + assert "recipient@email.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == token - assert mock_get_pdfs.call_args[0][1]['identifier'] == 'BC1234567' - assert mock_get_pdfs.call_args[0][1]['legalName'] == legal_name - assert mock_get_pdfs.call_args[0][1]['legalType'] == legal_type + assert mock_get_pdfs.call_args[0][1]["identifier"] == "BC1234567" + assert mock_get_pdfs.call_args[0][1]["legalName"] == legal_name + assert mock_get_pdfs.call_args[0][1]["legalType"] == legal_type assert mock_get_pdfs.call_args[0][2] == filing diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_continuation_out_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_continuation_out_notification.py index aec024b2d7..1321e499ed 100644 --- 
a/queue_services/entity-emailer/tests/unit/email_processors/test_continuation_out_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_continuation_out_notification.py @@ -21,38 +21,40 @@ from tests.unit import prep_continuation_out_filing -@pytest.mark.parametrize('status,legal_type,submitter_role', [ - ('COMPLETED', LegalEntity.EntityTypes.COMP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BCOMP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BC_CCC.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), - ('COMPLETED', LegalEntity.EntityTypes.COMP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.BCOMP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.BC_CCC.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.BC_ULC_COMPANY.value, 'staff') -]) +@pytest.mark.parametrize( + "status,legal_type,submitter_role", + [ + ("COMPLETED", LegalEntity.EntityTypes.COMP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BCOMP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BC_CCC.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), + ("COMPLETED", LegalEntity.EntityTypes.COMP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.BCOMP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.BC_CCC.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.BC_ULC_COMPANY.value, "staff"), + ], +) def test_continuation_out_notification(app, session, status, legal_type, submitter_role): """Assert that the continuation_out email processor for corps works as expected.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_continuation_out_filing(session, 'BC1234567', '1', legal_type, legal_name, submitter_role) - token = 'token' + legal_name = "test business" + filing = prep_continuation_out_filing(session, "BC1234567", "1", legal_type, legal_name, submitter_role) + token = "token" # test processor - with patch.object(continuation_out_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(continuation_out_notification, 'get_recipient_from_auth', - return_value='recipient@email.com'): + with patch.object(continuation_out_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object(continuation_out_notification, "get_recipient_from_auth", return_value="recipient@email.com"): email = continuation_out_notification.process( - {'filingId': filing.id, 'type': 'continuationOut', 'option': status}, token) - assert email['content']['subject'] == \ - legal_name + ' - Confirmation of Filing from the Business Registry' + {"filingId": filing.id, "type": "continuationOut", "option": status}, token + ) + assert email["content"]["subject"] == legal_name + " - Confirmation of Filing from the Business Registry" if submitter_role: - assert f'{submitter_role}@email.com' in email['recipients'] - assert 'recipient@email.com' in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert f"{submitter_role}@email.com" in email["recipients"] + assert "recipient@email.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == token - assert mock_get_pdfs.call_args[0][1]['identifier'] == 'BC1234567' - assert mock_get_pdfs.call_args[0][1]['legalName'] == legal_name - assert mock_get_pdfs.call_args[0][1]['legalType'] == legal_type + assert mock_get_pdfs.call_args[0][1]["identifier"] == 
"BC1234567" + assert mock_get_pdfs.call_args[0][1]["legalName"] == legal_name + assert mock_get_pdfs.call_args[0][1]["legalType"] == legal_type assert mock_get_pdfs.call_args[0][2] == filing diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_correction_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_correction_notification.py index d53edd4f1e..dee0e77492 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_correction_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_correction_notification.py @@ -28,123 +28,126 @@ prep_incorporation_correction_filing, ) +COMPLETED_SUBJECT_SUFIX = " - Correction Documents from the Business Registry" +CP_IDENTIFIER = "CP1234567" +SPECIAL_RESOLUTION_FILING_TYPE = "specialResolution" -COMPLETED_SUBJECT_SUFIX = ' - Correction Documents from the Business Registry' -CP_IDENTIFIER = 'CP1234567' -SPECIAL_RESOLUTION_FILING_TYPE = 'specialResolution' - -@pytest.mark.parametrize('status,legal_type', [ - ('PAID', LegalEntity.EntityTypes.SOLE_PROP.value), - ('COMPLETED', LegalEntity.EntityTypes.SOLE_PROP.value), - ('PAID', LegalEntity.EntityTypes.PARTNERSHIP.value), - ('COMPLETED', LegalEntity.EntityTypes.PARTNERSHIP.value), -]) +@pytest.mark.parametrize( + "status,legal_type", + [ + ("PAID", LegalEntity.EntityTypes.SOLE_PROP.value), + ("COMPLETED", LegalEntity.EntityTypes.SOLE_PROP.value), + ("PAID", LegalEntity.EntityTypes.PARTNERSHIP.value), + ("COMPLETED", LegalEntity.EntityTypes.PARTNERSHIP.value), + ], +) def test_firm_correction_notification(app, session, status, legal_type): """Assert that email attributes are correct.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_firm_correction_filing(session, 'FM1234567', '1', legal_type, legal_name, 'staff') - token = 'token' + legal_name = "test business" + filing = prep_firm_correction_filing(session, "FM1234567", "1", legal_type, legal_name, "staff") + token = "token" # test processor - with patch.object(correction_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - email = correction_notification.process( - {'filingId': filing.id, 'type': 'correction', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + ' - Confirmation of Filing from the Business Registry' + with patch.object(correction_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + email = correction_notification.process({"filingId": filing.id, "type": "correction", "option": status}, token) + if status == "PAID": + assert email["content"]["subject"] == legal_name + " - Confirmation of Filing from the Business Registry" else: - assert email['content']['subject'] == \ - legal_name + COMPLETED_SUBJECT_SUFIX + assert email["content"]["subject"] == legal_name + COMPLETED_SUBJECT_SUFIX - if status == 'COMPLETED': - assert 'no_one@never.get' in email['recipients'] + if status == "COMPLETED": + assert "no_one@never.get" in email["recipients"] if legal_type == LegalEntity.EntityTypes.PARTNERSHIP.value: - assert 'party@email.com' in email['recipients'] + assert "party@email.com" in email["recipients"] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - if status == 'COMPLETED': - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'FM1234567' + 
if status == "COMPLETED": + assert mock_get_pdfs.call_args[0][2]["identifier"] == "FM1234567" assert mock_get_pdfs.call_args[0][3] == filing -@pytest.mark.parametrize('status,legal_type', [ - ('PAID', LegalEntity.EntityTypes.COMP.value), - ('COMPLETED', LegalEntity.EntityTypes.COMP.value), - ('PAID', LegalEntity.EntityTypes.BCOMP.value), - ('COMPLETED', LegalEntity.EntityTypes.BCOMP.value), - ('PAID', LegalEntity.EntityTypes.BC_CCC.value), - ('COMPLETED', LegalEntity.EntityTypes.BC_CCC.value), - ('PAID', LegalEntity.EntityTypes.BC_ULC_COMPANY.value), - ('COMPLETED', LegalEntity.EntityTypes.BC_ULC_COMPANY.value), -]) +@pytest.mark.parametrize( + "status,legal_type", + [ + ("PAID", LegalEntity.EntityTypes.COMP.value), + ("COMPLETED", LegalEntity.EntityTypes.COMP.value), + ("PAID", LegalEntity.EntityTypes.BCOMP.value), + ("COMPLETED", LegalEntity.EntityTypes.BCOMP.value), + ("PAID", LegalEntity.EntityTypes.BC_CCC.value), + ("COMPLETED", LegalEntity.EntityTypes.BC_CCC.value), + ("PAID", LegalEntity.EntityTypes.BC_ULC_COMPANY.value), + ("COMPLETED", LegalEntity.EntityTypes.BC_ULC_COMPANY.value), + ], +) def test_bc_correction_notification(app, session, status, legal_type): """Assert that email attributes are correct.""" # setup filing + business for email - legal_name = 'test business' - original_filing = prep_incorp_filing(session, 'BC1234567', '1', status, legal_type=legal_type) - token = 'token' - business = LegalEntity.find_by_identifier('BC1234567') - filing = prep_incorporation_correction_filing(session, business, original_filing.id, '1', status) + legal_name = "test business" + original_filing = prep_incorp_filing(session, "BC1234567", "1", status, legal_type=legal_type) + token = "token" + business = LegalEntity.find_by_identifier("BC1234567") + filing = prep_incorporation_correction_filing(session, business, original_filing.id, "1", status) # test processor - with patch.object(correction_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - email = correction_notification.process( - {'filingId': filing.id, 'type': 'correction', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + ' - Confirmation of Filing from the Business Registry' + with patch.object(correction_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + email = correction_notification.process({"filingId": filing.id, "type": "correction", "option": status}, token) + if status == "PAID": + assert email["content"]["subject"] == legal_name + " - Confirmation of Filing from the Business Registry" else: - assert email['content']['subject'] == \ - legal_name + COMPLETED_SUBJECT_SUFIX + assert email["content"]["subject"] == legal_name + COMPLETED_SUBJECT_SUFIX - assert 'comp_party@email.com' in email['recipients'] - assert 'test@test.com' in email['recipients'] + assert "comp_party@email.com" in email["recipients"] + assert "test@test.com" in email["recipients"] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - if status == 'COMPLETED': - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'BC1234567' + if status == "COMPLETED": + assert mock_get_pdfs.call_args[0][2]["identifier"] == "BC1234567" assert mock_get_pdfs.call_args[0][3] == filing -@pytest.mark.parametrize('status,legal_type', [ - ('PAID', LegalEntity.EntityTypes.COOP.value), - ('COMPLETED', 
LegalEntity.EntityTypes.COOP.value), -]) +@pytest.mark.parametrize( + "status,legal_type", + [ + ("PAID", LegalEntity.EntityTypes.COOP.value), + ("COMPLETED", LegalEntity.EntityTypes.COOP.value), + ], +) def test_cp_special_resolution_correction_notification(app, session, status, legal_type): """Assert that email attributes are correct.""" # setup filing + business for email - legal_name = 'cp business' - original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, '1', legal_type, legal_name, submitter_role=None) - token = 'token' + legal_name = "cp business" + original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, "1", legal_type, legal_name, submitter_role=None) + token = "token" business = LegalEntity.find_by_identifier(CP_IDENTIFIER) - filing = prep_cp_special_resolution_correction_filing(session, business, original_filing.id, - '1', status, SPECIAL_RESOLUTION_FILING_TYPE) + filing = prep_cp_special_resolution_correction_filing( + session, business, original_filing.id, "1", status, SPECIAL_RESOLUTION_FILING_TYPE + ) # test processor - with patch.object(correction_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - email = correction_notification.process( - {'filingId': filing.id, 'type': 'correction', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + ' - Confirmation of correction' + with patch.object(correction_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + email = correction_notification.process({"filingId": filing.id, "type": "correction", "option": status}, token) + if status == "PAID": + assert email["content"]["subject"] == legal_name + " - Confirmation of correction" else: - assert email['content']['subject'] == \ - legal_name + COMPLETED_SUBJECT_SUFIX + assert email["content"]["subject"] == legal_name + COMPLETED_SUBJECT_SUFIX - assert 'cp_sr@test.com' in email['recipients'] + assert "cp_sr@test.com" in email["recipients"] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - if status == 'COMPLETED': - assert mock_get_pdfs.call_args[0][2]['identifier'] == CP_IDENTIFIER + if status == "COMPLETED": + assert mock_get_pdfs.call_args[0][2]["identifier"] == CP_IDENTIFIER assert mock_get_pdfs.call_args[0][3] == filing @@ -152,112 +155,104 @@ def test_complete_special_resolution_correction_attachments(session, config): """Test completed special resolution correction notification.""" # setup filing + business for email legal_type = LegalEntity.EntityTypes.COOP.value - legal_name = 'test cp sr business' - token = 'token' - status = 'COMPLETED' - original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, '1', legal_type, legal_name, submitter_role=None) + legal_name = "test cp sr business" + token = "token" + status = "COMPLETED" + original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, "1", legal_type, legal_name, submitter_role=None) business = LegalEntity.find_by_identifier(CP_IDENTIFIER) - filing = prep_cp_special_resolution_correction_filing(session, business, original_filing.id, - '1', status, SPECIAL_RESOLUTION_FILING_TYPE) + filing = prep_cp_special_resolution_correction_filing( + session, business, original_filing.id, "1", status, SPECIAL_RESOLUTION_FILING_TYPE + ) with requests_mock.Mocker() as m: m.get( ( f'{config.get("LEGAL_API_URL")}' - 
f'/businesses/{CP_IDENTIFIER}' - f'/filings/{filing.id}' - f'?type=specialResolution' + f"/businesses/{CP_IDENTIFIER}" + f"/filings/{filing.id}" + f"?type=specialResolution" ), - content=b'pdf_content_1', - status_code=200 + content=b"pdf_content_1", + status_code=200, ) m.get( f'{config.get("LEGAL_API_URL")}/businesses/{CP_IDENTIFIER}/filings/{filing.id}' - '?type=certificateOfNameChange', - content=b'pdf_content_2', - status_code=200 + "?type=certificateOfNameChange", + content=b"pdf_content_2", + status_code=200, ) m.get( f'{config.get("LEGAL_API_URL")}/businesses/{CP_IDENTIFIER}/filings/{filing.id}?type=certifiedRules', - content=b'pdf_content_3', - status_code=200 + content=b"pdf_content_3", + status_code=200, ) - output = correction_notification.process({ - 'filingId': filing.id, - 'type': 'correction', - 'option': status - }, token) - assert 'content' in output - assert 'attachments' in output['content'] - assert len(output['content']['attachments']) == 3 - assert output['content']['attachments'][0]['fileName'] == 'Special Resolution.pdf' - assert base64.b64decode(output['content']['attachments'][0]['fileBytes']).decode('utf-8') == 'pdf_content_1' - assert output['content']['attachments'][1]['fileName'] == 'Certificate of Name Change.pdf' - assert base64.b64decode(output['content']['attachments'][1]['fileBytes']).decode('utf-8') == 'pdf_content_2' - assert output['content']['attachments'][2]['fileName'] == 'Certified Rules.pdf' - assert base64.b64decode(output['content']['attachments'][2]['fileBytes']).decode('utf-8') == 'pdf_content_3' + output = correction_notification.process({"filingId": filing.id, "type": "correction", "option": status}, token) + assert "content" in output + assert "attachments" in output["content"] + assert len(output["content"]["attachments"]) == 3 + assert output["content"]["attachments"][0]["fileName"] == "Special Resolution.pdf" + assert base64.b64decode(output["content"]["attachments"][0]["fileBytes"]).decode("utf-8") == "pdf_content_1" + assert output["content"]["attachments"][1]["fileName"] == "Certificate of Name Change.pdf" + assert base64.b64decode(output["content"]["attachments"][1]["fileBytes"]).decode("utf-8") == "pdf_content_2" + assert output["content"]["attachments"][2]["fileName"] == "Certified Rules.pdf" + assert base64.b64decode(output["content"]["attachments"][2]["fileBytes"]).decode("utf-8") == "pdf_content_3" def test_paid_special_resolution_correction_attachments(session, config): """Test paid special resolution correction notification.""" # setup filing + business for email legal_type = LegalEntity.EntityTypes.COOP.value - legal_name = 'test cp sr business' - token = 'token' - status = 'PAID' - original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, '1', legal_type, legal_name, submitter_role=None) + legal_name = "test cp sr business" + token = "token" + status = "PAID" + original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, "1", legal_type, legal_name, submitter_role=None) business = LegalEntity.find_by_identifier(CP_IDENTIFIER) - filing = prep_cp_special_resolution_correction_filing(session, business, original_filing.id, - '1', status, SPECIAL_RESOLUTION_FILING_TYPE) + filing = prep_cp_special_resolution_correction_filing( + session, business, original_filing.id, "1", status, SPECIAL_RESOLUTION_FILING_TYPE + ) with requests_mock.Mocker() as m: m.get( - f'{config.get("LEGAL_API_URL")}/businesses/{CP_IDENTIFIER}/filings/{filing.id}' - f'?type=correction', - content=b'pdf_content_1', - status_code=200 - ) - 
m.post( - f'{config.get("PAY_API_URL")}/1/receipts', - content=b'pdf_content_2', - status_code=201 + f'{config.get("LEGAL_API_URL")}/businesses/{CP_IDENTIFIER}/filings/{filing.id}' f"?type=correction", + content=b"pdf_content_1", + status_code=200, ) - output = correction_notification.process({ - 'filingId': filing.id, - 'type': 'correction', - 'option': status - }, token) - assert 'content' in output - assert 'attachments' in output['content'] - assert len(output['content']['attachments']) == 2 - assert output['content']['attachments'][0]['fileName'] == 'Register Correction Application.pdf' - assert base64.b64decode(output['content']['attachments'][0]['fileBytes']).decode('utf-8') == 'pdf_content_1' - assert output['content']['attachments'][1]['fileName'] == 'Receipt.pdf' - assert base64.b64decode(output['content']['attachments'][1]['fileBytes']).decode('utf-8') == 'pdf_content_2' + m.post(f'{config.get("PAY_API_URL")}/1/receipts', content=b"pdf_content_2", status_code=201) + output = correction_notification.process({"filingId": filing.id, "type": "correction", "option": status}, token) + assert "content" in output + assert "attachments" in output["content"] + assert len(output["content"]["attachments"]) == 2 + assert output["content"]["attachments"][0]["fileName"] == "Register Correction Application.pdf" + assert base64.b64decode(output["content"]["attachments"][0]["fileBytes"]).decode("utf-8") == "pdf_content_1" + assert output["content"]["attachments"][1]["fileName"] == "Receipt.pdf" + assert base64.b64decode(output["content"]["attachments"][1]["fileBytes"]).decode("utf-8") == "pdf_content_2" -@pytest.mark.parametrize('legal_type, filing_type', [ - (LegalEntity.EntityTypes.COOP.value, SPECIAL_RESOLUTION_FILING_TYPE), - (LegalEntity.EntityTypes.CCC_CONTINUE_IN.value, SPECIAL_RESOLUTION_FILING_TYPE), - (LegalEntity.EntityTypes.COOP.value, 'registration'), -]) +@pytest.mark.parametrize( + "legal_type, filing_type", + [ + (LegalEntity.EntityTypes.COOP.value, SPECIAL_RESOLUTION_FILING_TYPE), + (LegalEntity.EntityTypes.CCC_CONTINUE_IN.value, SPECIAL_RESOLUTION_FILING_TYPE), + (LegalEntity.EntityTypes.COOP.value, "registration"), + ], +) def test_paid_special_resolution_correction_on_correction(session, config, legal_type, filing_type): """Assert that email attributes are correct.""" # setup filing + business for email - legal_name = 'cp business' - original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, '1', legal_type, - legal_name, submitter_role=None) - token = 'token' + legal_name = "cp business" + original_filing = prep_cp_special_resolution_filing(CP_IDENTIFIER, "1", legal_type, legal_name, submitter_role=None) + token = "token" business = LegalEntity.find_by_identifier(CP_IDENTIFIER) - filing_correction = prep_cp_special_resolution_correction_filing(session, business, original_filing.id, - '1', 'COMPLETED', filing_type) - filing = prep_cp_special_resolution_correction_filing(session, business, filing_correction.id, - '1', 'PAID', 'correction') + filing_correction = prep_cp_special_resolution_correction_filing( + session, business, original_filing.id, "1", "COMPLETED", filing_type + ) + filing = prep_cp_special_resolution_correction_filing( + session, business, filing_correction.id, "1", "PAID", "correction" + ) # test processor - with patch.object(correction_notification, '_get_pdfs', return_value=[]): - email = correction_notification.process( - {'filingId': filing.id, 'type': 'correction', 'option': 'PAID'}, token) - if legal_type == LegalEntity.EntityTypes.COOP.value and 
filing_type == 'specialResolution': - assert email['content']['subject'] == legal_name + ' - Confirmation of correction' - assert 'cp_sr@test.com' in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + with patch.object(correction_notification, "_get_pdfs", return_value=[]): + email = correction_notification.process({"filingId": filing.id, "type": "correction", "option": "PAID"}, token) + if legal_type == LegalEntity.EntityTypes.COOP.value and filing_type == "specialResolution": + assert email["content"]["subject"] == legal_name + " - Confirmation of correction" + assert "cp_sr@test.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_cp_special_resolution_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_cp_special_resolution_notification.py index 7085151021..4b85175d9e 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_cp_special_resolution_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_cp_special_resolution_notification.py @@ -22,135 +22,138 @@ from entity_emailer.email_processors import special_resolution_notification from tests.unit import prep_cp_special_resolution_filing - LEGAL_TYPE = LegalEntity.EntityTypes.COOP.value -LEGAL_NAME = 'test business' -IDENTIFIER = 'CP1234567' -TOKEN = 'token' -RECIPIENT_EMAIL = 'recipient@email.com' -USER_EMAIL_FROM_AUTH = 'user@email.com' +LEGAL_NAME = "test business" +IDENTIFIER = "CP1234567" +TOKEN = "token" +RECIPIENT_EMAIL = "recipient@email.com" +USER_EMAIL_FROM_AUTH = "user@email.com" -@pytest.mark.parametrize('status', [ - ('PAID'), - ('COMPLETED') -]) +@pytest.mark.parametrize("status", [("PAID"), ("COMPLETED")]) def test_cp_special_resolution_notification(session, app, config, status): """Assert that the special resolution email processor works as expected.""" # setup filing + business for email - filing = prep_cp_special_resolution_filing(IDENTIFIER, '1', LEGAL_TYPE, LEGAL_NAME, submitter_role=None) - get_pdf_function = 'get_paid_pdfs' if status == 'PAID' else 'get_completed_pdfs' + filing = prep_cp_special_resolution_filing(IDENTIFIER, "1", LEGAL_TYPE, LEGAL_NAME, submitter_role=None) + get_pdf_function = "get_paid_pdfs" if status == "PAID" else "get_completed_pdfs" # test processor with patch.object(special_resolution_notification, get_pdf_function, return_value=[]) as mock_get_pdfs: - with patch.object(special_resolution_notification, 'get_recipient_from_auth', - return_value=RECIPIENT_EMAIL): - with patch.object(special_resolution_notification, 'get_user_email_from_auth', - return_value=USER_EMAIL_FROM_AUTH): + with patch.object(special_resolution_notification, "get_recipient_from_auth", return_value=RECIPIENT_EMAIL): + with patch.object( + special_resolution_notification, "get_user_email_from_auth", return_value=USER_EMAIL_FROM_AUTH + ): email = special_resolution_notification.process( - {'filingId': filing.id, 'type': 'specialResolution', 'option': status}, TOKEN) - if status == 'PAID': - assert email['content']['subject'] == LEGAL_NAME + \ - ' - Confirmation of Special Resolution from the Business Registry' + {"filingId": filing.id, "type": "specialResolution", "option": status}, TOKEN + ) + if status == "PAID": + assert ( + email["content"]["subject"] + == LEGAL_NAME + " - Confirmation of Special Resolution from the Business Registry" + ) else: - assert 
email['content']['subject'] == \ - LEGAL_NAME + ' - Special Resolution Documents from the Business Registry' + assert ( + email["content"]["subject"] + == LEGAL_NAME + " - Special Resolution Documents from the Business Registry" + ) - assert RECIPIENT_EMAIL in email['recipients'] - assert USER_EMAIL_FROM_AUTH in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert RECIPIENT_EMAIL in email["recipients"] + assert USER_EMAIL_FROM_AUTH in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == TOKEN - assert mock_get_pdfs.call_args[0][1]['identifier'] == IDENTIFIER + assert mock_get_pdfs.call_args[0][1]["identifier"] == IDENTIFIER assert mock_get_pdfs.call_args[0][2] == filing def test_complete_special_resolution_attachments(session, config): """Test completed special resolution notification.""" # setup filing + business for email - status = 'COMPLETED' - filing = prep_cp_special_resolution_filing(IDENTIFIER, '1', LEGAL_TYPE, LEGAL_NAME, submitter_role=None) + status = "COMPLETED" + filing = prep_cp_special_resolution_filing(IDENTIFIER, "1", LEGAL_TYPE, LEGAL_NAME, submitter_role=None) with requests_mock.Mocker() as m: - with patch.object(special_resolution_notification, 'get_recipient_from_auth', - return_value=RECIPIENT_EMAIL): - with patch.object(special_resolution_notification, 'get_user_email_from_auth', - return_value=USER_EMAIL_FROM_AUTH): + with patch.object(special_resolution_notification, "get_recipient_from_auth", return_value=RECIPIENT_EMAIL): + with patch.object( + special_resolution_notification, "get_user_email_from_auth", return_value=USER_EMAIL_FROM_AUTH + ): m.get( ( f'{config.get("LEGAL_API_URL")}' - f'/businesses/{IDENTIFIER}' - f'/filings/{filing.id}' - f'?type=specialResolution' + f"/businesses/{IDENTIFIER}" + f"/filings/{filing.id}" + f"?type=specialResolution" ), - content=b'pdf_content_1', - status_code=200 + content=b"pdf_content_1", + status_code=200, ) m.get( f'{config.get("LEGAL_API_URL")}/businesses/{IDENTIFIER}/filings/{filing.id}' - '?type=certificateOfNameChange', - content=b'pdf_content_2', - status_code=200 + "?type=certificateOfNameChange", + content=b"pdf_content_2", + status_code=200, ) m.get( f'{config.get("LEGAL_API_URL")}/businesses/{IDENTIFIER}/filings/{filing.id}?type=certifiedRules', - content=b'pdf_content_3', - status_code=200 + content=b"pdf_content_3", + status_code=200, ) - output = special_resolution_notification.process({ - 'filingId': filing.id, - 'type': 'specialResolution', - 'option': status - }, TOKEN) - assert 'content' in output - assert 'attachments' in output['content'] - assert len(output['content']['attachments']) == 3 - assert output['content']['attachments'][0]['fileName'] == 'Special Resolution.pdf' - assert (base64.b64decode(output['content']['attachments'][0]['fileBytes']).decode('utf-8') - == 'pdf_content_1') - assert output['content']['attachments'][1]['fileName'] == 'Certificate of Name Change.pdf' - assert (base64.b64decode(output['content']['attachments'][1]['fileBytes']).decode('utf-8') - == 'pdf_content_2') - assert output['content']['attachments'][2]['fileName'] == 'Certified Rules.pdf' - assert (base64.b64decode(output['content']['attachments'][2]['fileBytes']).decode('utf-8') - == 'pdf_content_3') + output = special_resolution_notification.process( + {"filingId": filing.id, "type": "specialResolution", "option": status}, TOKEN + ) + assert "content" in output + assert 
"attachments" in output["content"] + assert len(output["content"]["attachments"]) == 3 + assert output["content"]["attachments"][0]["fileName"] == "Special Resolution.pdf" + assert ( + base64.b64decode(output["content"]["attachments"][0]["fileBytes"]).decode("utf-8") + == "pdf_content_1" + ) + assert output["content"]["attachments"][1]["fileName"] == "Certificate of Name Change.pdf" + assert ( + base64.b64decode(output["content"]["attachments"][1]["fileBytes"]).decode("utf-8") + == "pdf_content_2" + ) + assert output["content"]["attachments"][2]["fileName"] == "Certified Rules.pdf" + assert ( + base64.b64decode(output["content"]["attachments"][2]["fileBytes"]).decode("utf-8") + == "pdf_content_3" + ) def test_paid_special_resolution_attachments(session, config): """Test paid special resolution notification.""" # setup filing + business for email - status = 'PAID' - filing = prep_cp_special_resolution_filing(IDENTIFIER, '1', LEGAL_TYPE, LEGAL_NAME, submitter_role=None) + status = "PAID" + filing = prep_cp_special_resolution_filing(IDENTIFIER, "1", LEGAL_TYPE, LEGAL_NAME, submitter_role=None) with requests_mock.Mocker() as m: - with patch.object(special_resolution_notification, 'get_recipient_from_auth', - return_value=RECIPIENT_EMAIL): - with patch.object(special_resolution_notification, 'get_user_email_from_auth', - return_value=USER_EMAIL_FROM_AUTH): + with patch.object(special_resolution_notification, "get_recipient_from_auth", return_value=RECIPIENT_EMAIL): + with patch.object( + special_resolution_notification, "get_user_email_from_auth", return_value=USER_EMAIL_FROM_AUTH + ): m.get( ( f'{config.get("LEGAL_API_URL")}' - f'/businesses/{IDENTIFIER}' - f'/filings/{filing.id}' - f'?type=specialResolutionApplication' + f"/businesses/{IDENTIFIER}" + f"/filings/{filing.id}" + f"?type=specialResolutionApplication" ), - content=b'pdf_content_1', - status_code=200 + content=b"pdf_content_1", + status_code=200, + ) + m.post(f'{config.get("PAY_API_URL")}/1/receipts', content=b"pdf_content_2", status_code=201) + output = special_resolution_notification.process( + {"filingId": filing.id, "type": "specialResolution", "option": status}, TOKEN + ) + assert "content" in output + assert "attachments" in output["content"] + assert len(output["content"]["attachments"]) == 2 + assert output["content"]["attachments"][0]["fileName"] == "Special Resolution Application.pdf" + assert ( + base64.b64decode(output["content"]["attachments"][0]["fileBytes"]).decode("utf-8") + == "pdf_content_1" ) - m.post( - f'{config.get("PAY_API_URL")}/1/receipts', - content=b'pdf_content_2', - status_code=201 + assert output["content"]["attachments"][1]["fileName"] == "Receipt.pdf" + assert ( + base64.b64decode(output["content"]["attachments"][1]["fileBytes"]).decode("utf-8") + == "pdf_content_2" ) - output = special_resolution_notification.process({ - 'filingId': filing.id, - 'type': 'specialResolution', - 'option': status - }, TOKEN) - assert 'content' in output - assert 'attachments' in output['content'] - assert len(output['content']['attachments']) == 2 - assert output['content']['attachments'][0]['fileName'] == 'Special Resolution Application.pdf' - assert (base64.b64decode(output['content']['attachments'][0]['fileBytes']).decode('utf-8') - == 'pdf_content_1') - assert output['content']['attachments'][1]['fileName'] == 'Receipt.pdf' - assert (base64.b64decode(output['content']['attachments'][1]['fileBytes']).decode('utf-8') - == 'pdf_content_2') diff --git 
a/queue_services/entity-emailer/tests/unit/email_processors/test_dissolution_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_dissolution_notification.py index 3739f860a2..06201d6765 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_dissolution_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_dissolution_notification.py @@ -21,91 +21,104 @@ from tests.unit import prep_dissolution_filing -@pytest.mark.parametrize('status,legal_type,submitter_role', [ - ('PAID', LegalEntity.EntityTypes.COMP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.COMP.value, None), - ('PAID', LegalEntity.EntityTypes.BCOMP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BCOMP.value, None), - ('PAID', LegalEntity.EntityTypes.BC_CCC.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BC_CCC.value, None), - ('PAID', LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), - ('COMPLETED', LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), - ('PAID', LegalEntity.EntityTypes.COOP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.COOP.value, None), - - ('PAID', LegalEntity.EntityTypes.COMP.value, 'staff'), - ('COMPLETED', LegalEntity.EntityTypes.COMP.value, 'staff'), -]) +@pytest.mark.parametrize( + "status,legal_type,submitter_role", + [ + ("PAID", LegalEntity.EntityTypes.COMP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.COMP.value, None), + ("PAID", LegalEntity.EntityTypes.BCOMP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BCOMP.value, None), + ("PAID", LegalEntity.EntityTypes.BC_CCC.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BC_CCC.value, None), + ("PAID", LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), + ("COMPLETED", LegalEntity.EntityTypes.BC_ULC_COMPANY.value, None), + ("PAID", LegalEntity.EntityTypes.COOP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.COOP.value, None), + ("PAID", LegalEntity.EntityTypes.COMP.value, "staff"), + ("COMPLETED", LegalEntity.EntityTypes.COMP.value, "staff"), + ], +) def test_dissolution_notification(app, session, status, legal_type, submitter_role): """Assert that the dissolution email processor for corps works as expected.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_dissolution_filing(session, 'BC1234567', '1', status, legal_type, legal_name, submitter_role) - token = 'token' + legal_name = "test business" + filing = prep_dissolution_filing(session, "BC1234567", "1", status, legal_type, legal_name, submitter_role) + token = "token" # test processor - with patch.object(dissolution_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(dissolution_notification, 'get_recipient_from_auth', return_value='recipient@email.com'): - with patch.object(dissolution_notification, 'get_user_email_from_auth', return_value='user@email.com'): + with patch.object(dissolution_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object(dissolution_notification, "get_recipient_from_auth", return_value="recipient@email.com"): + with patch.object(dissolution_notification, "get_user_email_from_auth", return_value="user@email.com"): email = dissolution_notification.process( - {'filingId': filing.id, 'type': 'dissolution', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + ' - Voluntary dissolution' + {"filingId": filing.id, "type": "dissolution", "option": status}, token + ) + if status == "PAID": + assert 
email["content"]["subject"] == legal_name + " - Voluntary dissolution" else: - assert email['content']['subject'] == \ - legal_name + ' - Confirmation of Dissolution from the Business Registry' + assert ( + email["content"]["subject"] + == legal_name + " - Confirmation of Dissolution from the Business Registry" + ) if submitter_role: - assert f'{submitter_role}@email.com' in email['recipients'] + assert f"{submitter_role}@email.com" in email["recipients"] else: - assert 'user@email.com' in email['recipients'] - assert 'recipient@email.com' in email['recipients'] - assert 'custodian@email.com' in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert "user@email.com" in email["recipients"] + assert "recipient@email.com" in email["recipients"] + assert "custodian@email.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'BC1234567' - assert mock_get_pdfs.call_args[0][2]['legalName'] == legal_name - assert mock_get_pdfs.call_args[0][2]['legalType'] == legal_type + assert mock_get_pdfs.call_args[0][2]["identifier"] == "BC1234567" + assert mock_get_pdfs.call_args[0][2]["legalName"] == legal_name + assert mock_get_pdfs.call_args[0][2]["legalType"] == legal_type assert mock_get_pdfs.call_args[0][3] == filing -@pytest.mark.parametrize('status,legal_type,submitter_role', [ - ('PAID', LegalEntity.EntityTypes.SOLE_PROP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.SOLE_PROP.value, None), - ('PAID', LegalEntity.EntityTypes.PARTNERSHIP.value, None), - ('COMPLETED', LegalEntity.EntityTypes.PARTNERSHIP.value, None), -]) +@pytest.mark.parametrize( + "status,legal_type,submitter_role", + [ + ("PAID", LegalEntity.EntityTypes.SOLE_PROP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.SOLE_PROP.value, None), + ("PAID", LegalEntity.EntityTypes.PARTNERSHIP.value, None), + ("COMPLETED", LegalEntity.EntityTypes.PARTNERSHIP.value, None), + ], +) def test_firms_dissolution_notification(app, session, status, legal_type, submitter_role): """Assert that the dissolution email processor for firms works as expected.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_dissolution_filing(session, 'FM1234567', '1', status, legal_type, legal_name, submitter_role) - token = 'token' + legal_name = "test business" + filing = prep_dissolution_filing(session, "FM1234567", "1", status, legal_type, legal_name, submitter_role) + token = "token" # test processor - with patch.object(dissolution_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(dissolution_notification, 'get_recipient_from_auth', return_value='recipient@email.com'): - with patch.object(dissolution_notification, 'get_user_email_from_auth', return_value='user@email.com'): + with patch.object(dissolution_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object(dissolution_notification, "get_recipient_from_auth", return_value="recipient@email.com"): + with patch.object(dissolution_notification, "get_user_email_from_auth", return_value="user@email.com"): email = dissolution_notification.process( - {'filingId': filing.id, 'type': 'dissolution', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + \ - ' - Confirmation of Filing from the Business Registry' + {"filingId": 
filing.id, "type": "dissolution", "option": status}, token + ) + if status == "PAID": + assert ( + email["content"]["subject"] + == legal_name + " - Confirmation of Filing from the Business Registry" + ) else: - assert email['content']['subject'] == \ - legal_name + ' - Dissolution Documents from the Business Registry' + assert ( + email["content"]["subject"] + == legal_name + " - Dissolution Documents from the Business Registry" + ) if submitter_role: - assert f'{submitter_role}@email.com' in email['recipients'] + assert f"{submitter_role}@email.com" in email["recipients"] else: - assert 'user@email.com' in email['recipients'] - assert 'recipient@email.com' in email['recipients'] - assert 'cp@email.com' in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert "user@email.com" in email["recipients"] + assert "recipient@email.com" in email["recipients"] + assert "cp@email.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'FM1234567' - assert mock_get_pdfs.call_args[0][2]['legalName'] == legal_name - assert mock_get_pdfs.call_args[0][2]['legalType'] == legal_type + assert mock_get_pdfs.call_args[0][2]["identifier"] == "FM1234567" + assert mock_get_pdfs.call_args[0][2]["legalName"] == legal_name + assert mock_get_pdfs.call_args[0][2]["legalType"] == legal_type assert mock_get_pdfs.call_args[0][3] == filing diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_filing_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_filing_notification.py index 5997af1e27..ab07458dd0 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_filing_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_filing_notification.py @@ -21,98 +21,107 @@ from tests.unit import prep_incorp_filing, prep_maintenance_filing -@pytest.mark.parametrize('status', [ - ('PAID'), - ('COMPLETED'), -]) +@pytest.mark.parametrize( + "status", + [ + ("PAID"), + ("COMPLETED"), + ], +) def test_incorp_notification(app, session, status): """Assert that the legal name is changed.""" # setup filing + business for email - filing = prep_incorp_filing(session, 'BC1234567', '1', status, 'BC') - token = 'token' + filing = prep_incorp_filing(session, "BC1234567", "1", status, "BC") + token = "token" # test processor - with patch.object(filing_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: + with patch.object(filing_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: email = filing_notification.process( - {'filingId': filing.id, 'type': 'incorporationApplication', 'option': status}, token) - if status == 'PAID': - assert 'comp_party@email.com' in email['recipients'] - assert email['content']['subject'] == 'Confirmation of Filing from the Business Registry' + {"filingId": filing.id, "type": "incorporationApplication", "option": status}, token + ) + if status == "PAID": + assert "comp_party@email.com" in email["recipients"] + assert email["content"]["subject"] == "Confirmation of Filing from the Business Registry" else: - assert email['content']['subject'] == 'Incorporation Documents from the Business Registry' + assert email["content"]["subject"] == "Incorporation Documents from the Business Registry" - assert 'test@test.com' in email['recipients'] - assert 
email['content']['body'] - assert email['content']['attachments'] == [] + assert "test@test.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - if status == 'PAID': - assert mock_get_pdfs.call_args[0][2]['identifier'].startswith('T') + if status == "PAID": + assert mock_get_pdfs.call_args[0][2]["identifier"].startswith("T") else: - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'BC1234567' + assert mock_get_pdfs.call_args[0][2]["identifier"] == "BC1234567" - assert mock_get_pdfs.call_args[0][2]['legalType'] == 'BC' + assert mock_get_pdfs.call_args[0][2]["legalType"] == "BC" assert mock_get_pdfs.call_args[0][3] == filing -@pytest.mark.parametrize('legal_type', [ - ('BEN'), - ('BC'), - ('ULC'), - ('CC'), -]) +@pytest.mark.parametrize( + "legal_type", + [ + ("BEN"), + ("BC"), + ("ULC"), + ("CC"), + ], +) def test_numbered_incorp_notification(app, session, legal_type): """Assert that the legal name is changed.""" # setup filing + business for email - filing = prep_incorp_filing(session, 'BC1234567', '1', 'PAID', legal_type=legal_type) - token = 'token' + filing = prep_incorp_filing(session, "BC1234567", "1", "PAID", legal_type=legal_type) + token = "token" # test processor - with patch.object(filing_notification, '_get_pdfs', return_value=[]): + with patch.object(filing_notification, "_get_pdfs", return_value=[]): email = filing_notification.process( - {'filingId': filing.id, 'type': 'incorporationApplication', 'option': 'PAID'}, token) + {"filingId": filing.id, "type": "incorporationApplication", "option": "PAID"}, token + ) - assert email['content']['body'] - assert LegalEntity.BUSINESSES[legal_type]['numberedDescription'] in email['content']['body'] + assert email["content"]["body"] + assert LegalEntity.BUSINESSES[legal_type]["numberedDescription"] in email["content"]["body"] -@pytest.mark.parametrize(['status', 'filing_type', 'submitter_role'], [ - ('PAID', 'annualReport', None), - ('PAID', 'changeOfAddress', None), - ('PAID', 'changeOfDirectors', None), - ('PAID', 'alteration', None), - ('COMPLETED', 'changeOfAddress', None), - ('COMPLETED', 'changeOfDirectors', None), - ('COMPLETED', 'alteration', None), - ('COMPLETED', 'alteration', 'staff') -]) +@pytest.mark.parametrize( + ["status", "filing_type", "submitter_role"], + [ + ("PAID", "annualReport", None), + ("PAID", "changeOfAddress", None), + ("PAID", "changeOfDirectors", None), + ("PAID", "alteration", None), + ("COMPLETED", "changeOfAddress", None), + ("COMPLETED", "changeOfDirectors", None), + ("COMPLETED", "alteration", None), + ("COMPLETED", "alteration", "staff"), + ], +) def test_maintenance_notification(app, session, mocker, status, filing_type, submitter_role): """Assert that the legal name is changed.""" # setup filing + business for email - filing = prep_maintenance_filing(session, 'BC1234567', '1', status, filing_type, submitter_role=submitter_role) - token = 'token' + filing = prep_maintenance_filing(session, "BC1234567", "1", status, filing_type, submitter_role=submitter_role) + token = "token" # test processor mocker.patch( - 'entity_emailer.email_processors.filing_notification.get_user_email_from_auth', - return_value='user@email.com') - with patch.object(filing_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(filing_notification, 'get_recipients', return_value='test@test.com') \ - as mock_get_recipients: - email = 
filing_notification.process( - {'filingId': filing.id, 'type': filing_type, 'option': status}, token) + "entity_emailer.email_processors.filing_notification.get_user_email_from_auth", return_value="user@email.com" + ) + with patch.object(filing_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object(filing_notification, "get_recipients", return_value="test@test.com") as mock_get_recipients: + email = filing_notification.process({"filingId": filing.id, "type": filing_type, "option": status}, token) - if filing_type == 'alteration': + if filing_type == "alteration": if submitter_role: - assert f'{submitter_role}@email.com' in email['recipients'] + assert f"{submitter_role}@email.com" in email["recipients"] else: - assert 'user@email.com' in email['recipients'] + assert "user@email.com" in email["recipients"] - assert 'test@test.com' in email['recipients'] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert "test@test.com" in email["recipients"] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'BC1234567' - assert mock_get_pdfs.call_args[0][2]['legalType'] == LegalEntity.EntityTypes.BCOMP.value - assert mock_get_pdfs.call_args[0][2]['legalName'] == 'test business' + assert mock_get_pdfs.call_args[0][2]["identifier"] == "BC1234567" + assert mock_get_pdfs.call_args[0][2]["legalType"] == LegalEntity.EntityTypes.BCOMP.value + assert mock_get_pdfs.call_args[0][2]["legalName"] == "test business" assert mock_get_pdfs.call_args[0][3] == filing assert mock_get_recipients.call_args[0][0] == status assert mock_get_recipients.call_args[0][1] == filing.filing_json diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_mras_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_mras_notification.py index d9bf9bda73..fd287d897c 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_mras_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_mras_notification.py @@ -19,12 +19,11 @@ def test_mras_notification(app, session): """Assert that the legal name is changed.""" # setup filing + business for email - filing = prep_incorp_filing(session, 'BC1234567', '1', 'mras') + filing = prep_incorp_filing(session, "BC1234567", "1", "mras") # run processor - email = mras_notification.process( - {'filingId': filing.id, 'type': 'incorporationApplication', 'option': 'mras'}) + email = mras_notification.process({"filingId": filing.id, "type": "incorporationApplication", "option": "mras"}) # check email values - assert email['recipients'] == 'test@test.com' - assert email['content']['subject'] == 'BC Business Registry Partner Information' - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert email["recipients"] == "test@test.com" + assert email["content"]["subject"] == "BC Business Registry Partner Information" + assert email["content"]["body"] + assert email["content"]["attachments"] == [] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_nr_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_nr_notification.py index 917c559948..550cd75b05 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_nr_notification.py +++ 
b/queue_services/entity-emailer/tests/unit/email_processors/test_nr_notification.py @@ -22,70 +22,95 @@ from entity_emailer.email_processors import nr_notification from tests import MockResponse +default_legal_name = "TEST COMP" +default_names_array = [{"name": default_legal_name, "state": "NE"}] -default_legal_name = 'TEST COMP' -default_names_array = [{'name': default_legal_name, 'state': 'NE'}] - -@pytest.mark.parametrize(['option', 'nr_number', 'subject', 'expiration_date', 'refund_value', - 'expected_legal_name', 'names'], [ - ('before-expiry', 'NR 1234567', 'Expiring Soon', '2021-07-20T00:00:00+00:00', None, 'TEST2 Company Name', - [{'name': 'TEST Company Name', 'state': 'NE'}, {'name': 'TEST2 Company Name', 'state': 'APPROVED'}]), - ('before-expiry', 'NR 1234567', 'Expiring Soon', '2021-07-20T00:00:00+00:00', None, 'TEST3 Company Name', - [{'name': 'TEST3 Company Name', 'state': 'CONDITION'}, {'name': 'TEST4 Company Name', 'state': 'NE'}]), - ('expired', 'NR 1234567', 'Expired', None, None, 'TEST4 Company Name', - [{'name': 'TEST5 Company Name', 'state': 'NE'}, {'name': 'TEST4 Company Name', 'state': 'APPROVED'}]), - ('renewal', 'NR 1234567', 'Confirmation of Renewal', '2021-07-20T00:00:00+00:00', None, None, default_names_array), - ('upgrade', 'NR 1234567', 'Confirmation of Upgrade', None, None, None, default_names_array), - ('refund', 'NR 1234567', 'Refund request confirmation', None, '123.45', None, default_names_array) -]) -def test_nr_notification(app, session, option, nr_number, subject, expiration_date, refund_value, - expected_legal_name, names): +@pytest.mark.parametrize( + ["option", "nr_number", "subject", "expiration_date", "refund_value", "expected_legal_name", "names"], + [ + ( + "before-expiry", + "NR 1234567", + "Expiring Soon", + "2021-07-20T00:00:00+00:00", + None, + "TEST2 Company Name", + [{"name": "TEST Company Name", "state": "NE"}, {"name": "TEST2 Company Name", "state": "APPROVED"}], + ), + ( + "before-expiry", + "NR 1234567", + "Expiring Soon", + "2021-07-20T00:00:00+00:00", + None, + "TEST3 Company Name", + [{"name": "TEST3 Company Name", "state": "CONDITION"}, {"name": "TEST4 Company Name", "state": "NE"}], + ), + ( + "expired", + "NR 1234567", + "Expired", + None, + None, + "TEST4 Company Name", + [{"name": "TEST5 Company Name", "state": "NE"}, {"name": "TEST4 Company Name", "state": "APPROVED"}], + ), + ( + "renewal", + "NR 1234567", + "Confirmation of Renewal", + "2021-07-20T00:00:00+00:00", + None, + None, + default_names_array, + ), + ("upgrade", "NR 1234567", "Confirmation of Upgrade", None, None, None, default_names_array), + ("refund", "NR 1234567", "Refund request confirmation", None, "123.45", None, default_names_array), + ], +) +def test_nr_notification( + app, session, option, nr_number, subject, expiration_date, refund_value, expected_legal_name, names +): """Assert that the nr notification can be processed.""" nr_json = { - 'expirationDate': expiration_date, - 'names': names, - 'legalType': 'BC', - 'applicants': { - 'emailAddress': 'test@test.com' - } + "expirationDate": expiration_date, + "names": names, + "legalType": "BC", + "applicants": {"emailAddress": "test@test.com"}, } nr_response = MockResponse(nr_json, 200) # test processor - with patch.object(NameXService, 'query_nr_number', return_value=nr_response) \ - as mock_query_nr_number: - email = nr_notification.process({ - 'id': '123456789', - 'type': 'bc.registry.names.request', - 'source': f'/requests/{nr_number}', - 'identifier': nr_number, - 'data': { - 'request': { - 'nrNum': 
nr_number, - 'option': option, - 'refundValue': refund_value - } - } - }, option) + with patch.object(NameXService, "query_nr_number", return_value=nr_response) as mock_query_nr_number: + email = nr_notification.process( + { + "id": "123456789", + "type": "bc.registry.names.request", + "source": f"/requests/{nr_number}", + "identifier": nr_number, + "data": {"request": {"nrNum": nr_number, "option": option, "refundValue": refund_value}}, + }, + option, + ) - assert email['content']['subject'] == f'{nr_number} - {subject}' + assert email["content"]["subject"] == f"{nr_number} - {subject}" - assert 'test@test.com' in email['recipients'] - assert email['content']['body'] + assert "test@test.com" in email["recipients"] + assert email["content"]["body"] if option == nr_notification.Option.REFUND.value: - assert f'${refund_value} CAD' in email['content']['body'] - assert email['content']['attachments'] == [] + assert f"${refund_value} CAD" in email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_query_nr_number.call_args[0][0] == nr_number if option == nr_notification.Option.BEFORE_EXPIRY.value: - assert nr_number in email['content']['body'] - assert expected_legal_name in email['content']['body'] + assert nr_number in email["content"]["body"] + assert expected_legal_name in email["content"]["body"] exp_date = datetime.fromisoformat(expiration_date) exp_date_tz = LegislationDatetime.as_legislation_timezone(exp_date) assert_expiration_date = LegislationDatetime.format_as_report_string(exp_date_tz) - assert assert_expiration_date in email['content']['body'] + assert assert_expiration_date in email["content"]["body"] if option == nr_notification.Option.EXPIRED.value: - assert nr_number in email['content']['body'] - assert expected_legal_name in email['content']['body'] + assert nr_number in email["content"]["body"] + assert expected_legal_name in email["content"]["body"] diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_registration_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_registration_notification.py index 326b9de07c..afd7cc02fd 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_registration_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_registration_notification.py @@ -21,38 +21,41 @@ from tests.unit import prep_registration_filing -@pytest.mark.parametrize('status,legal_type', [ - ('PAID', LegalEntity.EntityTypes.SOLE_PROP.value), - ('COMPLETED', LegalEntity.EntityTypes.SOLE_PROP.value), - ('PAID', LegalEntity.EntityTypes.PARTNERSHIP.value), - ('COMPLETED', LegalEntity.EntityTypes.PARTNERSHIP.value), -]) +@pytest.mark.parametrize( + "status,legal_type", + [ + ("PAID", LegalEntity.EntityTypes.SOLE_PROP.value), + ("COMPLETED", LegalEntity.EntityTypes.SOLE_PROP.value), + ("PAID", LegalEntity.EntityTypes.PARTNERSHIP.value), + ("COMPLETED", LegalEntity.EntityTypes.PARTNERSHIP.value), + ], +) def test_registration_notification(app, session, status, legal_type): """Assert that the legal name is changed.""" # setup filing + business for email - legal_name = 'test business' - filing = prep_registration_filing(session, 'FM1234567', '1', status, legal_type, legal_name) - token = 'token' + legal_name = "test business" + filing = prep_registration_filing(session, "FM1234567", "1", status, legal_type, legal_name) + token = "token" # test processor - with patch.object(registration_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: + with 
patch.object(registration_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: email = registration_notification.process( - {'filingId': filing.id, 'type': 'registration', 'option': status}, token) - if status == 'PAID': - assert email['content']['subject'] == legal_name + ' - Confirmation of Filing from the Business Registry' + {"filingId": filing.id, "type": "registration", "option": status}, token + ) + if status == "PAID": + assert email["content"]["subject"] == legal_name + " - Confirmation of Filing from the Business Registry" else: - assert email['content']['subject'] == \ - legal_name + ' - Registration Documents from the Business Registry' + assert email["content"]["subject"] == legal_name + " - Registration Documents from the Business Registry" - assert 'joe@email.com' in email['recipients'] - if status == 'COMPLETED': - assert 'no_one@never.get' in email['recipients'] + assert "joe@email.com" in email["recipients"] + if status == "COMPLETED": + assert "no_one@never.get" in email["recipients"] if legal_type == LegalEntity.EntityTypes.PARTNERSHIP.value: - assert 'party@email.com' in email['recipients'] + assert "party@email.com" in email["recipients"] - assert email['content']['body'] - assert email['content']['attachments'] == [] + assert email["content"]["body"] + assert email["content"]["attachments"] == [] assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - if status == 'COMPLETED': - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'FM1234567' + if status == "COMPLETED": + assert mock_get_pdfs.call_args[0][2]["identifier"] == "FM1234567" assert mock_get_pdfs.call_args[0][3] == filing diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_restoration_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_restoration_notification.py index 49f1c7c5ac..2964a6c02b 100644 --- a/queue_services/entity-emailer/tests/unit/email_processors/test_restoration_notification.py +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_restoration_notification.py @@ -22,119 +22,117 @@ from entity_emailer.email_processors import restoration_notification from tests.unit import prep_restoration_filing - -LEGAL_NAME = 'test business' -BUS_ID = 'BC1234567' -TOKEN = 'token' -EXPECTED_EMAIL = 'joe@email.com' +LEGAL_NAME = "test business" +BUS_ID = "BC1234567" +TOKEN = "token" +EXPECTED_EMAIL = "joe@email.com" def test_complete_full_restoration_notification_includes_notice_of_articles_and_incorporation_cert(session, config): """Test completed full restoration notification.""" # setup filing + business for email - status = 'COMPLETED' - filing = prep_restoration_filing(BUS_ID, '1', 'BC', LEGAL_NAME) + status = "COMPLETED" + filing = prep_restoration_filing(BUS_ID, "1", "BC", LEGAL_NAME) with requests_mock.Mocker() as m: - m.get(f'{config.get("LEGAL_API_URL")}/businesses/{BUS_ID}/filings/{filing.id}?type=noticeOfArticles', - content=b'pdf_content_1', status_code=200) - m.get(f'{config.get("LEGAL_API_URL")}/businesses/{BUS_ID}/filings/{filing.id}?type=certificateOfRestoration', - content=b'pdf_content_2') - output = restoration_notification.process({ - 'filingId': filing.id, - 'type': 'restoration', - 'option': status - }, TOKEN) - assert 'content' in output - assert 'attachments' in output['content'] - assert len(output['content']['attachments']) == 2 - assert output['content']['attachments'][0]['fileName'] == 'Notice of Articles.pdf' - assert 
base64.b64decode(output['content']['attachments'][0]['fileBytes']).decode('utf-8') == 'pdf_content_1' - assert output['content']['attachments'][1]['fileName'] == 'Certificate of Restoration.pdf' - assert base64.b64decode(output['content']['attachments'][1]['fileBytes']).decode('utf-8') == 'pdf_content_2' + m.get( + f'{config.get("LEGAL_API_URL")}/businesses/{BUS_ID}/filings/{filing.id}?type=noticeOfArticles', + content=b"pdf_content_1", + status_code=200, + ) + m.get( + f'{config.get("LEGAL_API_URL")}/businesses/{BUS_ID}/filings/{filing.id}?type=certificateOfRestoration', + content=b"pdf_content_2", + ) + output = restoration_notification.process( + {"filingId": filing.id, "type": "restoration", "option": status}, TOKEN + ) + assert "content" in output + assert "attachments" in output["content"] + assert len(output["content"]["attachments"]) == 2 + assert output["content"]["attachments"][0]["fileName"] == "Notice of Articles.pdf" + assert base64.b64decode(output["content"]["attachments"][0]["fileBytes"]).decode("utf-8") == "pdf_content_1" + assert output["content"]["attachments"][1]["fileName"] == "Certificate of Restoration.pdf" + assert base64.b64decode(output["content"]["attachments"][1]["fileBytes"]).decode("utf-8") == "pdf_content_2" def test_paid_restoration_notification_includes_receipt_and_restoration_application_attachments(session, config): """Test PAID full restoration notification.""" # setup filing + business for email - status = 'PAID' - filing = prep_restoration_filing(BUS_ID, '1', 'BC', LEGAL_NAME) + status = "PAID" + filing = prep_restoration_filing(BUS_ID, "1", "BC", LEGAL_NAME) with requests_mock.Mocker() as m: - m.post(f'{config.get("PAY_API_URL")}/{filing.payment_token}/receipts', - content=b'pdf_content_1', status_code=201) - m.get(f'{config.get("LEGAL_API_URL")}/businesses/{BUS_ID}/filings/{filing.id}', - content=b'pdf_content_2', status_code=200) - output = restoration_notification.process({ - 'filingId': filing.id, - 'type': 'restoration', - 'option': status - }, TOKEN) - assert 'content' in output - assert 'attachments' in output['content'] - assert len(output['content']['attachments']) == 2 - assert output['content']['attachments'][0]['fileName'] == 'Restoration Application.pdf' - assert base64.b64decode(output['content']['attachments'][0]['fileBytes']).decode('utf-8') == 'pdf_content_2' - assert output['content']['attachments'][1]['fileName'] == 'Receipt.pdf' - assert base64.b64decode(output['content']['attachments'][1]['fileBytes']).decode('utf-8') == 'pdf_content_1' + m.post( + f'{config.get("PAY_API_URL")}/{filing.payment_token}/receipts', content=b"pdf_content_1", status_code=201 + ) + m.get( + f'{config.get("LEGAL_API_URL")}/businesses/{BUS_ID}/filings/{filing.id}', + content=b"pdf_content_2", + status_code=200, + ) + output = restoration_notification.process( + {"filingId": filing.id, "type": "restoration", "option": status}, TOKEN + ) + assert "content" in output + assert "attachments" in output["content"] + assert len(output["content"]["attachments"]) == 2 + assert output["content"]["attachments"][0]["fileName"] == "Restoration Application.pdf" + assert base64.b64decode(output["content"]["attachments"][0]["fileBytes"]).decode("utf-8") == "pdf_content_2" + assert output["content"]["attachments"][1]["fileName"] == "Receipt.pdf" + assert base64.b64decode(output["content"]["attachments"][1]["fileBytes"]).decode("utf-8") == "pdf_content_1" def test_completed_full_restoration_notification(session, config): """Test completed full restoration notification.""" # 
setup filing + business for email - status = 'COMPLETED' - filing = prep_restoration_filing(BUS_ID, '1', 'BC', LEGAL_NAME) + status = "COMPLETED" + filing = prep_restoration_filing(BUS_ID, "1", "BC", LEGAL_NAME) # test processor - with patch.object(restoration_notification, '_get_completed_pdfs', return_value=[]): - email_dict = restoration_notification.process({ - 'filingId': filing.id, - 'type': 'restoration', - 'option': status - }, TOKEN) - email = email_dict['content']['body'] - assert email_dict['content']['subject'] == 'test business - Restoration Documents from the Business Registry' - assert EXPECTED_EMAIL in email_dict['recipients'] - assert 'You have successfully restored your business with the BC Business Registry' in email + with patch.object(restoration_notification, "_get_completed_pdfs", return_value=[]): + email_dict = restoration_notification.process( + {"filingId": filing.id, "type": "restoration", "option": status}, TOKEN + ) + email = email_dict["content"]["body"] + assert email_dict["content"]["subject"] == "test business - Restoration Documents from the Business Registry" + assert EXPECTED_EMAIL in email_dict["recipients"] + assert "You have successfully restored your business with the BC Business Registry" in email def test_completed_extended_restoration_notification(session, config): """Test completed extended restoration notification includes specific wording.""" # setup filing + business for email - status = 'COMPLETED' - filing = prep_restoration_filing(BUS_ID, '1', 'BC', LEGAL_NAME, 'limitedRestorationExtension') - with patch.object(restoration_notification, '_get_completed_pdfs', return_value=[]): - email_dict = restoration_notification.process({ - 'filingId': filing.id, - 'type': 'restoration', - 'option': status - }, TOKEN) - email = email_dict['content']['body'] - assert 'You have successfully extended the period of restoration with the BC Business' in email + status = "COMPLETED" + filing = prep_restoration_filing(BUS_ID, "1", "BC", LEGAL_NAME, "limitedRestorationExtension") + with patch.object(restoration_notification, "_get_completed_pdfs", return_value=[]): + email_dict = restoration_notification.process( + {"filingId": filing.id, "type": "restoration", "option": status}, TOKEN + ) + email = email_dict["content"]["body"] + assert "You have successfully extended the period of restoration with the BC Business" in email @pytest.mark.parametrize( - 'restoration_type, attachment_name', + "restoration_type, attachment_name", [ - ('fullRestoration', 'Full Restoration Application'), - ('limitedRestoration', 'Limited Restoration Application'), - ('limitedRestorationExtension', 'Limited Restoration Extension Application'), - ('limitedRestorationToFull', 'Conversion to Full Restoration Application'), - ] + ("fullRestoration", "Full Restoration Application"), + ("limitedRestoration", "Limited Restoration Application"), + ("limitedRestorationExtension", "Limited Restoration Extension Application"), + ("limitedRestorationToFull", "Conversion to Full Restoration Application"), + ], ) def test_paid_full_restoration_notification(session, restoration_type, attachment_name): """Test PAID restoration notification.""" # setup filing + business for email - status = 'PAID' - filing = prep_restoration_filing('BC1234567', '1', 'BC', LEGAL_NAME, restoration_type) + status = "PAID" + filing = prep_restoration_filing("BC1234567", "1", "BC", LEGAL_NAME, restoration_type) # test processor - with patch.object(restoration_notification, '_get_paid_pdfs', return_value=[]): - email_dict 
= restoration_notification.process({ - 'filingId': filing.id, - 'type': 'restoration', - 'option': status - }, TOKEN) - email = email_dict['content']['body'] - assert EXPECTED_EMAIL in email_dict['recipients'] - assert email_dict['content']['subject'] == 'test business - Confirmation of Filing from the Business Registry' - assert EXPECTED_EMAIL in email_dict['recipients'] - assert 'You have successfully filed your restoration with the BC Business Registry' in email - assert email_dict['content']['attachments'] == [] + with patch.object(restoration_notification, "_get_paid_pdfs", return_value=[]): + email_dict = restoration_notification.process( + {"filingId": filing.id, "type": "restoration", "option": status}, TOKEN + ) + email = email_dict["content"]["body"] + assert EXPECTED_EMAIL in email_dict["recipients"] + assert email_dict["content"]["subject"] == "test business - Confirmation of Filing from the Business Registry" + assert EXPECTED_EMAIL in email_dict["recipients"] + assert "You have successfully filed your restoration with the BC Business Registry" in email + assert email_dict["content"]["attachments"] == [] assert attachment_name in email diff --git a/queue_services/entity-emailer/tests/unit/services/test_gcp_queue.py b/queue_services/entity-emailer/tests/unit/services/test_gcp_queue.py index babf550079..0ec78b2187 100644 --- a/queue_services/entity-emailer/tests/unit/services/test_gcp_queue.py +++ b/queue_services/entity-emailer/tests/unit/services/test_gcp_queue.py @@ -1,10 +1,9 @@ import base64 from contextlib import suppress -import pytest import flask -from simple_cloudevent import SimpleCloudEvent -from simple_cloudevent import to_queue_message +import pytest +from simple_cloudevent import SimpleCloudEvent, to_queue_message from entity_emailer.services.gcp_queue import GcpQueue @@ -19,9 +18,7 @@ } -@pytest.mark.parametrize( - "test_name,msg,expected", [("invalid", {}, False), ("valid", BASE_ENVELOPE, True)] -) +@pytest.mark.parametrize("test_name,msg,expected", [("invalid", {}, False), ("valid", BASE_ENVELOPE, True)]) def test_valid_envelope(test_name, msg, expected): """Test the validation the envelope.""" rv = GcpQueue.is_valid_envelope(msg) diff --git a/queue_services/entity-emailer/tests/unit/test_configuration.py b/queue_services/entity-emailer/tests/unit/test_configuration.py index 9798ebc946..55bd886124 100644 --- a/queue_services/entity-emailer/tests/unit/test_configuration.py +++ b/queue_services/entity-emailer/tests/unit/test_configuration.py @@ -36,27 +36,26 @@ Test-Suite to ensure that the Configuration Classes are working as expected. 
""" import pytest - from flask import current_app # testdata pattern is ({str: environment}, {expected return value}) TEST_ENVIRONMENT_DATA = [ - ('valid', 'development', current_app.config.get('DevConfig')), - ('valid', 'testing', current_app.config.get('TestConfig')), - ('valid', 'default', current_app.config.get('ProdConfig')), - ('valid', 'staging', current_app.config.get('ProdConfig')), - ('valid', 'production', current_app.config.get('ProdConfig')), - ('error', None, KeyError) + ("valid", "development", current_app.config.get("DevConfig")), + ("valid", "testing", current_app.config.get("TestConfig")), + ("valid", "default", current_app.config.get("ProdConfig")), + ("valid", "staging", current_app.config.get("ProdConfig")), + ("valid", "production", current_app.config.get("ProdConfig")), + ("error", None, KeyError), ] -@pytest.mark.parametrize('test_type,environment,expected', TEST_ENVIRONMENT_DATA) +@pytest.mark.parametrize("test_type,environment,expected", TEST_ENVIRONMENT_DATA) def test_get_named_config(test_type, environment, expected): """Assert that the named configurations can be loaded. Or that a KeyError is returned for missing config types. """ - if test_type == 'valid': + if test_type == "valid": assert isinstance(current_app.config.get_named_config(environment), expected) else: with pytest.raises(KeyError): diff --git a/queue_services/entity-emailer/tests/unit/test_version.py b/queue_services/entity-emailer/tests/unit/test_version.py index f1295ed025..dad8571999 100644 --- a/queue_services/entity-emailer/tests/unit/test_version.py +++ b/queue_services/entity-emailer/tests/unit/test_version.py @@ -42,6 +42,7 @@ PACKAGE_NAME = "entity_emailer" + def test_get_version(): """Assert that the version is returned correctly.""" rv = utils.get_run_version() diff --git a/queue_services/entity-emailer/tests/unit/test_worker.py b/queue_services/entity-emailer/tests/unit/test_worker.py index 78beb7bfc9..735c3d39f7 100644 --- a/queue_services/entity-emailer/tests/unit/test_worker.py +++ b/queue_services/entity-emailer/tests/unit/test_worker.py @@ -45,7 +45,6 @@ from simple_cloudevent import SimpleCloudEvent, to_queue_message from entity_emailer import worker -from entity_emailer.services import queue from entity_emailer.email_processors import ( ar_reminder_notification, correction_notification, @@ -54,14 +53,15 @@ nr_notification, special_resolution_notification, ) +from entity_emailer.services import queue from tests import MockResponse from tests.unit import ( + nested_session, prep_cp_special_resolution_correction_filing, prep_cp_special_resolution_filing, prep_incorp_filing, prep_maintenance_filing, ) -from tests.unit import nested_session def test_no_message(client): @@ -77,13 +77,7 @@ def test_no_message(client): source="fake-for-tests", subject="fake-subject", type="email", - data={ - "email": { - "filingId": "BC1234567", - "type": "bn", - "option": "COMPLETED" - } - }, + data={"email": {"filingId": "BC1234567", "type": "bn", "option": "COMPLETED"}}, ) # # This needs to mimic the envelope created by GCP PubSb when call a resource @@ -109,22 +103,24 @@ def test_simple_cloud_event(client, session, test_name, queue_envelope, expected assert rv.status_code == expected -@pytest.mark.parametrize('option', [ - ('PAID'), - ('COMPLETED'), -]) +@pytest.mark.parametrize( + "option", + [ + ("PAID"), + ("COMPLETED"), + ], +) def test_process_incorp_email(app, session, client, option): """Assert that an INCORP email msg is processed correctly.""" # Setup - filing = prep_incorp_filing(session, 
'BC1234567', '1', option, 'BC') - token = '1' - email_msg = {'email': {'filingId': filing.id, - 'type': 'incorporationApplication', 'option': option}} + filing = prep_incorp_filing(session, "BC1234567", "1", option, "BC") + token = "1" + email_msg = {"email": {"filingId": filing.id, "type": "incorporationApplication", "option": option}} message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(filing_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(filing_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -134,51 +130,55 @@ def test_process_incorp_email(app, session, client, option): assert mock_get_pdfs.call_args[0][0] == option assert mock_get_pdfs.call_args[0][1] == token - if option == 'PAID': - assert mock_get_pdfs.call_args[0][2]['identifier'].startswith( - 'T') + if option == "PAID": + assert mock_get_pdfs.call_args[0][2]["identifier"].startswith("T") else: - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'BC1234567' + assert mock_get_pdfs.call_args[0][2]["identifier"] == "BC1234567" - assert mock_get_pdfs.call_args[0][2]['legalType'] == 'BC' + assert mock_get_pdfs.call_args[0][2]["legalType"] == "BC" assert mock_get_pdfs.call_args[0][3] == filing - if option == 'PAID': - assert 'comp_party@email.com' in mock_send_email.call_args[0][0]['recipients'] - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - 'Confirmation of Filing from the Business Registry' + if option == "PAID": + assert "comp_party@email.com" in mock_send_email.call_args[0][0]["recipients"] + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] + == "Confirmation of Filing from the Business Registry" + ) else: - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - 'Incorporation Documents from the Business Registry' - assert 'test@test.com' in mock_send_email.call_args[0][0]['recipients'] - assert mock_send_email.call_args[0][0]['content']['body'] - assert mock_send_email.call_args[0][0]['content']['attachments'] == [ - ] + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] + == "Incorporation Documents from the Business Registry" + ) + assert "test@test.com" in mock_send_email.call_args[0][0]["recipients"] + assert mock_send_email.call_args[0][0]["content"]["body"] + assert mock_send_email.call_args[0][0]["content"]["attachments"] == [] assert mock_send_email.call_args[0][1] == token -@pytest.mark.parametrize(['status', 'filing_type'], [ - ('PAID', 'annualReport'), - ('PAID', 'changeOfAddress'), - ('PAID', 'changeOfDirectors'), - ('COMPLETED', 'changeOfAddress'), - ('COMPLETED', 'changeOfDirectors') -]) +@pytest.mark.parametrize( + ["status", "filing_type"], + [ + ("PAID", "annualReport"), + ("PAID", "changeOfAddress"), + ("PAID", "changeOfDirectors"), + ("COMPLETED", "changeOfAddress"), + ("COMPLETED", "changeOfDirectors"), + ], +) def test_maintenance_notification(app, session, client, status, filing_type): """Assert that the legal name is changed.""" # Setup - filing = prep_maintenance_filing( - session, 'BC1234567', '1', status, filing_type) - token = 'token' - 
email_msg = {'email': {'filingId': filing.id, - 'type': f'{filing_type}', 'option': status}} + filing = prep_maintenance_filing(session, "BC1234567", "1", status, filing_type) + token = "token" + email_msg = {"email": {"filingId": filing.id, "type": f"{filing_type}", "option": status}} message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(filing_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: - with patch.object(filing_notification, 'get_recipients', return_value='test@test.com') \ - as mock_get_recipients: - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(filing_notification, "_get_pdfs", return_value=[]) as mock_get_pdfs: + with patch.object( + filing_notification, "get_recipients", return_value="test@test.com" + ) as mock_get_recipients: + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -189,43 +189,43 @@ def test_maintenance_notification(app, session, client, status, filing_type): assert mock_get_pdfs.call_args[0][0] == status assert mock_get_pdfs.call_args[0][1] == token - assert mock_get_pdfs.call_args[0][2]['identifier'] == 'BC1234567' - assert mock_get_pdfs.call_args[0][2]['legalType'] == LegalEntity.EntityTypes.BCOMP.value - assert mock_get_pdfs.call_args[0][2]['legalName'] == 'test business' + assert mock_get_pdfs.call_args[0][2]["identifier"] == "BC1234567" + assert mock_get_pdfs.call_args[0][2]["legalType"] == LegalEntity.EntityTypes.BCOMP.value + assert mock_get_pdfs.call_args[0][2]["legalName"] == "test business" assert mock_get_pdfs.call_args[0][3] == filing assert mock_get_recipients.call_args[0][0] == status assert mock_get_recipients.call_args[0][1] == filing.filing_json assert mock_get_recipients.call_args[0][2] == token - assert mock_send_email.call_args[0][0]['content']['subject'] - assert 'test@test.com' in mock_send_email.call_args[0][0]['recipients'] - assert mock_send_email.call_args[0][0]['content']['body'] - assert mock_send_email.call_args[0][0]['content']['attachments'] == [ - ] + assert mock_send_email.call_args[0][0]["content"]["subject"] + assert "test@test.com" in mock_send_email.call_args[0][0]["recipients"] + assert mock_send_email.call_args[0][0]["content"]["body"] + assert mock_send_email.call_args[0][0]["content"]["attachments"] == [] assert mock_send_email.call_args[0][1] == token -@pytest.mark.parametrize(['status', 'filing_type', 'identifier'], [ - ('COMPLETED', 'annualReport', 'BC1234567'), - ('PAID', 'changeOfAddress', 'CP1234567'), - ('PAID', 'changeOfDirectors', 'CP1234567'), - ('COMPLETED', 'changeOfAddress', 'CP1234567'), - ('COMPLETED', 'changeOfDirectors', 'CP1234567') -]) +@pytest.mark.parametrize( + ["status", "filing_type", "identifier"], + [ + ("COMPLETED", "annualReport", "BC1234567"), + ("PAID", "changeOfAddress", "CP1234567"), + ("PAID", "changeOfDirectors", "CP1234567"), + ("COMPLETED", "changeOfAddress", "CP1234567"), + ("COMPLETED", "changeOfDirectors", "CP1234567"), + ], +) def test_skips_notification(app, session, client, status, filing_type, identifier): """Assert that the legal name is changed.""" # Setup - filing = prep_maintenance_filing( - session, identifier, '1', status, filing_type) - token = 'token' - email_msg = {'email': {'filingId': 
filing.id, - 'type': f'{filing_type}', 'option': status}} + filing = prep_maintenance_filing(session, identifier, "1", status, filing_type) + token = "token" + email_msg = {"email": {"filingId": filing.id, "type": f"{filing_type}", "option": status}} message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(filing_notification, '_get_pdfs', return_value=[]): - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(filing_notification, "_get_pdfs", return_value=[]): + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -238,14 +238,13 @@ def test_skips_notification(app, session, client, status, filing_type, identifie def test_process_mras_email(app, session, client): """Assert that an MRAS email msg is processed correctly.""" # Setup - filing = prep_incorp_filing(session, 'BC1234567', '1', 'mras') - token = '1' - email_msg = {'email': {'filingId': filing.id, - 'type': 'incorporationApplication', 'option': 'mras'}} + filing = prep_incorp_filing(session, "BC1234567", "1", "mras") + token = "1" + email_msg = {"email": {"filingId": filing.id, "type": "incorporationApplication", "option": "mras"}} message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -253,36 +252,40 @@ def test_process_mras_email(app, session, client): # Check assert rv.status_code == HTTPStatus.OK - assert mock_send_email.call_args[0][0]['content']['subject'] == 'BC Business Registry Partner Information' - assert mock_send_email.call_args[0][0]['recipients'] == 'test@test.com' - assert mock_send_email.call_args[0][0]['content']['body'] - assert mock_send_email.call_args[0][0]['content']['attachments'] == [ - ] + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] == "BC Business Registry Partner Information" + ) + assert mock_send_email.call_args[0][0]["recipients"] == "test@test.com" + assert mock_send_email.call_args[0][0]["content"]["body"] + assert mock_send_email.call_args[0][0]["content"]["attachments"] == [] assert mock_send_email.call_args[0][1] == token -@pytest.mark.parametrize(['option', 'submitter_role'], [ - ('PAID', 'staff'), - ('COMPLETED', None), -]) +@pytest.mark.parametrize( + ["option", "submitter_role"], + [ + ("PAID", "staff"), + ("COMPLETED", None), + ], +) def test_process_special_resolution_email(app, session, client, option, submitter_role): """Assert that an special resolution email msg is processed correctly.""" # Setup - filing = prep_cp_special_resolution_filing( - 'CP1234567', '1', 'CP', 'TEST', submitter_role=submitter_role) - token = '1' - get_pdf_function = 'get_paid_pdfs' if option == 'PAID' else 'get_completed_pdfs' - email_msg = {'email': {'filingId': filing.id, - 'type': 'specialResolution', 'option': option}} + filing = prep_cp_special_resolution_filing("CP1234567", "1", "CP", "TEST", 
submitter_role=submitter_role) + token = "1" + get_pdf_function = "get_paid_pdfs" if option == "PAID" else "get_completed_pdfs" + email_msg = {"email": {"filingId": filing.id, "type": "specialResolution", "option": option}} message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): + with patch.object(AccountService, "get_bearer_token", return_value=token): with patch.object(special_resolution_notification, get_pdf_function, return_value=[]) as mock_get_pdfs: - with patch.object(special_resolution_notification, 'get_recipient_from_auth', - return_value='recipient@email.com'): - with patch.object(special_resolution_notification, 'get_user_email_from_auth', - return_value='user@email.com'): - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object( + special_resolution_notification, "get_recipient_from_auth", return_value="recipient@email.com" + ): + with patch.object( + special_resolution_notification, "get_user_email_from_auth", return_value="user@email.com" + ): + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -291,47 +294,52 @@ def test_process_special_resolution_email(app, session, client, option, submitte assert rv.status_code == HTTPStatus.OK assert mock_get_pdfs.call_args[0][0] == token - assert mock_get_pdfs.call_args[0][1]['identifier'] == 'CP1234567' + assert mock_get_pdfs.call_args[0][1]["identifier"] == "CP1234567" assert mock_get_pdfs.call_args[0][2] == filing - if option == 'PAID': - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - 'TEST - Confirmation of Special Resolution from the Business Registry' + if option == "PAID": + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] + == "TEST - Confirmation of Special Resolution from the Business Registry" + ) else: - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - 'TEST - Special Resolution Documents from the Business Registry' - assert 'recipient@email.com' in mock_send_email.call_args[0][0]['recipients'] + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] + == "TEST - Special Resolution Documents from the Business Registry" + ) + assert "recipient@email.com" in mock_send_email.call_args[0][0]["recipients"] if submitter_role: - assert f'{submitter_role}@email.com' in mock_send_email.call_args[0][0]['recipients'] + assert f"{submitter_role}@email.com" in mock_send_email.call_args[0][0]["recipients"] else: - assert 'user@email.com' in mock_send_email.call_args[0][0]['recipients'] - assert mock_send_email.call_args[0][0]['content']['body'] - assert mock_send_email.call_args[0][0]['content']['attachments'] == [ - ] + assert "user@email.com" in mock_send_email.call_args[0][0]["recipients"] + assert mock_send_email.call_args[0][0]["content"]["body"] + assert mock_send_email.call_args[0][0]["content"]["attachments"] == [] assert mock_send_email.call_args[0][1] == token -@pytest.mark.parametrize('option', [ - ('PAID'), - ('COMPLETED'), -]) +@pytest.mark.parametrize( + "option", + [ + ("PAID"), + ("COMPLETED"), + ], +) def test_process_correction_cp_sr_email(app, session, client, option): """Assert that a correction email msg is processed correctly.""" # Setup - identifier = 'CP1234567' - original_filing = prep_cp_special_resolution_filing( - identifier, '1', 'CP', 'TEST', submitter_role=None) - token = '1' + 
identifier = "CP1234567" + original_filing = prep_cp_special_resolution_filing(identifier, "1", "CP", "TEST", submitter_role=None) + token = "1" business = LegalEntity.find_by_identifier(identifier) - filing = prep_cp_special_resolution_correction_filing(session, business, original_filing.id, - '1', option, 'specialResolution') - email_msg = {'email': {'filingId': filing.id, - 'type': 'correction', 'option': option}} + filing = prep_cp_special_resolution_correction_filing( + session, business, original_filing.id, "1", option, "specialResolution" + ) + email_msg = {"email": {"filingId": filing.id, "type": "correction", "option": option}} message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(correction_notification, '_get_pdfs', return_value=[]): - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(correction_notification, "_get_pdfs", return_value=[]): + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -339,37 +347,43 @@ def test_process_correction_cp_sr_email(app, session, client, option): # Check assert rv.status_code == HTTPStatus.OK - if option == 'PAID': - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - 'TEST - Confirmation of correction' + if option == "PAID": + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] == "TEST - Confirmation of correction" + ) else: - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - 'TEST - Correction Documents from the Business Registry' - assert 'cp_sr@test.com' in mock_send_email.call_args[0][0]['recipients'] - assert mock_send_email.call_args[0][0]['content']['body'] - assert mock_send_email.call_args[0][0]['content']['attachments'] == [ - ] + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] + == "TEST - Correction Documents from the Business Registry" + ) + assert "cp_sr@test.com" in mock_send_email.call_args[0][0]["recipients"] + assert mock_send_email.call_args[0][0]["content"]["body"] + assert mock_send_email.call_args[0][0]["content"]["attachments"] == [] assert mock_send_email.call_args[0][1] == token def test_process_ar_reminder_email(app, session, client): """Assert that the ar reminder notification can be processed.""" # Setup - filing = prep_incorp_filing(session, 'BC1234567', '1', 'COMPLETED') + filing = prep_incorp_filing(session, "BC1234567", "1", "COMPLETED") business = LegalEntity.find_by_internal_id(filing.business_id) - business.legal_type = 'BC' - business.legal_name = 'test business' - token = 'token' - email_msg = {'email': { - 'businessId': filing.business_id, - 'type': 'annualReport', 'option': 'reminder', - 'arFee': '100', 'arYear': '2021' - }} + business.legal_type = "BC" + business.legal_name = "test business" + token = "token" + email_msg = { + "email": { + "businessId": filing.business_id, + "type": "annualReport", + "option": "reminder", + "arFee": "100", + "arYear": "2021", + } + } message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(ar_reminder_notification, 'get_recipient_from_auth', return_value='test@test.com'): - with patch.object(worker, 'send_email', return_value='success') as 
mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(ar_reminder_notification, "get_recipient_from_auth", return_value="test@test.com"): + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -378,31 +392,30 @@ def test_process_ar_reminder_email(app, session, client): assert rv.status_code == HTTPStatus.OK call_args = mock_send_email.call_args - assert call_args[0][0]['content']['subject'] == 'test business 2021 Annual Report Reminder' - assert call_args[0][0]['recipients'] == 'test@test.com' - assert call_args[0][0]['content']['body'] - assert 'Dye & Durham' not in call_args[0][0]['content']['body'] - assert call_args[0][0]['content']['attachments'] == [] + assert call_args[0][0]["content"]["subject"] == "test business 2021 Annual Report Reminder" + assert call_args[0][0]["recipients"] == "test@test.com" + assert call_args[0][0]["content"]["body"] + assert "Dye & Durham" not in call_args[0][0]["content"]["body"] + assert call_args[0][0]["content"]["attachments"] == [] assert call_args[0][1] == token def test_process_bn_email(app, session, client): """Assert that a BN email msg is processed correctly.""" # Setup - identifier = 'BC1234567' - filing = prep_incorp_filing(session, identifier, '1', 'bn') + identifier = "BC1234567" + filing = prep_incorp_filing(session, identifier, "1", "bn") business = LegalEntity.find_by_identifier(identifier) - email_msg = {'email': {'filingId': None, 'type': 'businessNumber', - 'option': 'bn', 'identifier': 'BC1234567'}} + email_msg = {"email": {"filingId": None, "type": "businessNumber", "option": "bn", "identifier": "BC1234567"}} message = helper_create_cloud_event_envelope(data=email_msg) # Sanity check assert filing.id assert business.id - token = '1' + token = "1" - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -410,67 +423,88 @@ def test_process_bn_email(app, session, client): # Check assert rv.status_code == HTTPStatus.OK - assert 'comp_party@email.com' in mock_send_email.call_args[0][0]['recipients'] - assert 'test@test.com' in mock_send_email.call_args[0][0]['recipients'] - assert mock_send_email.call_args[0][0]['content']['subject'] == \ - f'{business.legal_name} - Business Number Information' - assert mock_send_email.call_args[0][0]['content']['body'] - assert mock_send_email.call_args[0][0]['content']['attachments'] == [ - ] - - -default_legal_name = 'TEST COMP' -default_names_array = [{'name': default_legal_name, 'state': 'NE'}] - - -@pytest.mark.parametrize(['option', 'nr_number', 'subject', 'expiration_date', 'refund_value', - 'expected_legal_name', 'names'], [ - ('before-expiry', 'NR 1234567', 'Expiring Soon', '2021-07-20T00:00:00+00:00', None, 'TEST2 Company Name', - [{'name': 'TEST Company Name', 'state': 'NE'}, {'name': 'TEST2 Company Name', 'state': 'APPROVED'}]), - ('before-expiry', 'NR 1234567', 'Expiring Soon', '2021-07-20T00:00:00+00:00', None, 'TEST3 Company Name', - [{'name': 'TEST3 Company Name', 'state': 'CONDITION'}, {'name': 'TEST4 Company Name', 'state': 'NE'}]), - 
('expired', 'NR 1234567', 'Expired', None, None, 'TEST4 Company Name', - [{'name': 'TEST5 Company Name', 'state': 'NE'}, {'name': 'TEST4 Company Name', 'state': 'APPROVED'}]), - ('renewal', 'NR 1234567', 'Confirmation of Renewal', - '2021-07-20T00:00:00+00:00', None, None, default_names_array), - ('upgrade', 'NR 1234567', 'Confirmation of Upgrade', - None, None, None, default_names_array), - ('refund', 'NR 1234567', 'Refund request confirmation', - None, '123.45', None, default_names_array) -]) -def test_nr_notification(app, session, client, option, nr_number, subject, expiration_date, refund_value, - expected_legal_name, names): + assert "comp_party@email.com" in mock_send_email.call_args[0][0]["recipients"] + assert "test@test.com" in mock_send_email.call_args[0][0]["recipients"] + assert ( + mock_send_email.call_args[0][0]["content"]["subject"] + == f"{business.legal_name} - Business Number Information" + ) + assert mock_send_email.call_args[0][0]["content"]["body"] + assert mock_send_email.call_args[0][0]["content"]["attachments"] == [] + + +default_legal_name = "TEST COMP" +default_names_array = [{"name": default_legal_name, "state": "NE"}] + + +@pytest.mark.parametrize( + ["option", "nr_number", "subject", "expiration_date", "refund_value", "expected_legal_name", "names"], + [ + ( + "before-expiry", + "NR 1234567", + "Expiring Soon", + "2021-07-20T00:00:00+00:00", + None, + "TEST2 Company Name", + [{"name": "TEST Company Name", "state": "NE"}, {"name": "TEST2 Company Name", "state": "APPROVED"}], + ), + ( + "before-expiry", + "NR 1234567", + "Expiring Soon", + "2021-07-20T00:00:00+00:00", + None, + "TEST3 Company Name", + [{"name": "TEST3 Company Name", "state": "CONDITION"}, {"name": "TEST4 Company Name", "state": "NE"}], + ), + ( + "expired", + "NR 1234567", + "Expired", + None, + None, + "TEST4 Company Name", + [{"name": "TEST5 Company Name", "state": "NE"}, {"name": "TEST4 Company Name", "state": "APPROVED"}], + ), + ( + "renewal", + "NR 1234567", + "Confirmation of Renewal", + "2021-07-20T00:00:00+00:00", + None, + None, + default_names_array, + ), + ("upgrade", "NR 1234567", "Confirmation of Upgrade", None, None, None, default_names_array), + ("refund", "NR 1234567", "Refund request confirmation", None, "123.45", None, default_names_array), + ], +) +def test_nr_notification( + app, session, client, option, nr_number, subject, expiration_date, refund_value, expected_legal_name, names +): """Assert that the nr notification can be processed.""" # Setup nr_json = { - 'expirationDate': expiration_date, - 'names': names, - 'legalType': 'BC', - 'applicants': { - 'emailAddress': 'test@test.com' - } + "expirationDate": expiration_date, + "names": names, + "legalType": "BC", + "applicants": {"emailAddress": "test@test.com"}, } nr_response = MockResponse(nr_json, 200) - token = 'token' + token = "token" email_msg = { - 'id': '123456789', - 'type': 'bc.registry.names.request', - 'source': f'/requests/{nr_number}', - 'identifier': nr_number, - 'data': { - 'request': { - 'nrNum': nr_number, - 'option': option, - 'refundValue': refund_value - } - } + "id": "123456789", + "type": "bc.registry.names.request", + "source": f"/requests/{nr_number}", + "identifier": nr_number, + "data": {"request": {"nrNum": nr_number, "option": option, "refundValue": refund_value}}, } message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(NameXService, 'query_nr_number', return_value=nr_response) \ - as 
mock_query_nr_number: - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(NameXService, "query_nr_number", return_value=nr_response) as mock_query_nr_number: + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -479,67 +513,59 @@ def test_nr_notification(app, session, client, option, nr_number, subject, expir assert rv.status_code == HTTPStatus.OK call_args = mock_send_email.call_args - assert call_args[0][0]['content']['subject'] == f'{nr_number} - {subject}' - assert call_args[0][0]['recipients'] == 'test@test.com' - assert call_args[0][0]['content']['body'] + assert call_args[0][0]["content"]["subject"] == f"{nr_number} - {subject}" + assert call_args[0][0]["recipients"] == "test@test.com" + assert call_args[0][0]["content"]["body"] if option == nr_notification.Option.REFUND.value: - assert f'${refund_value} CAD' in call_args[0][0]['content']['body'] - assert call_args[0][0]['content']['attachments'] == [] + assert f"${refund_value} CAD" in call_args[0][0]["content"]["body"] + assert call_args[0][0]["content"]["attachments"] == [] assert mock_query_nr_number.call_args[0][0] == nr_number assert call_args[0][1] == token if option == nr_notification.Option.BEFORE_EXPIRY.value: - assert nr_number in call_args[0][0]['content']['body'] - assert expected_legal_name in call_args[0][0]['content']['body'] + assert nr_number in call_args[0][0]["content"]["body"] + assert expected_legal_name in call_args[0][0]["content"]["body"] exp_date = datetime.fromisoformat(expiration_date) - exp_date_tz = LegislationDatetime.as_legislation_timezone( - exp_date) - assert_expiration_date = LegislationDatetime.format_as_report_string( - exp_date_tz) - assert assert_expiration_date in call_args[0][0]['content']['body'] + exp_date_tz = LegislationDatetime.as_legislation_timezone(exp_date) + assert_expiration_date = LegislationDatetime.format_as_report_string(exp_date_tz) + assert assert_expiration_date in call_args[0][0]["content"]["body"] if option == nr_notification.Option.EXPIRED.value: - assert nr_number in call_args[0][0]['content']['body'] - assert expected_legal_name in call_args[0][0]['content']['body'] + assert nr_number in call_args[0][0]["content"]["body"] + assert expected_legal_name in call_args[0][0]["content"]["body"] def test_nr_receipt_notification(app, session, client): """Assert that the nr payment notification can be processed.""" # Setup - nr_number = 'NR 1234567' - email_address = 'test@test.com' + nr_number = "NR 1234567" + email_address = "test@test.com" nr_id = 12345 - nr_json = { - 'applicants': { - 'emailAddress': email_address - }, - 'id': nr_id - } + nr_json = {"applicants": {"emailAddress": email_address}, "id": nr_id} nr_response = MockResponse(nr_json, 200) - token = 'token' - payment_token = '1234' - pdfs = ['test'] + token = "token" + payment_token = "1234" + pdfs = ["test"] email_msg = { - 'id': '123456789', - 'type': 'bc.registry.names.request', - 'source': f'/requests/{nr_number}', - 'identifier': nr_number, - 'data': { - 'request': { - 'header': {'nrNum': nr_number}, - 'paymentToken': payment_token, - 'statusCode': 'DRAFT' # not used + "id": "123456789", + "type": "bc.registry.names.request", + "source": f"/requests/{nr_number}", + "identifier": nr_number, + "data": { + "request": { + "header": {"nrNum": nr_number}, + 
"paymentToken": payment_token, + "statusCode": "DRAFT", # not used } - } + }, } message = helper_create_cloud_event_envelope(data=email_msg) - with patch.object(AccountService, 'get_bearer_token', return_value=token): - with patch.object(NameXService, 'query_nr_number', return_value=nr_response) \ - as mock_query_nr_number: - with patch.object(name_request, 'get_nr_bearer_token', return_value=token): - with patch.object(name_request, '_get_pdfs', return_value=pdfs) as mock_pdf: - with patch.object(worker, 'send_email', return_value='success') as mock_send_email: + with patch.object(AccountService, "get_bearer_token", return_value=token): + with patch.object(NameXService, "query_nr_number", return_value=nr_response) as mock_query_nr_number: + with patch.object(name_request, "get_nr_bearer_token", return_value=token): + with patch.object(name_request, "_get_pdfs", return_value=pdfs) as mock_pdf: + with patch.object(worker, "send_email", return_value="success") as mock_send_email: with patch.object(queue, "publish", return_value={}): # TEST rv = client.post("/", json=message) @@ -551,54 +577,46 @@ def test_nr_receipt_notification(app, session, client): assert mock_pdf.call_args[0][1] == payment_token assert mock_query_nr_number.call_args[0][0] == nr_number call_args = mock_send_email.call_args - assert call_args[0][0]['content'][ - 'subject'] == f'{nr_number} - Receipt from Corporate Registry' - assert call_args[0][0]['recipients'] == email_address - assert call_args[0][0]['content']['body'] - assert call_args[0][0]['content']['attachments'] == pdfs + assert ( + call_args[0][0]["content"]["subject"] + == f"{nr_number} - Receipt from Corporate Registry" + ) + assert call_args[0][0]["recipients"] == email_address + assert call_args[0][0]["content"]["body"] + assert call_args[0][0]["content"]["attachments"] == pdfs assert call_args[0][1] == token -@pytest.mark.parametrize('email_msg', [ - ({}), - ({ - 'recipients': '', - 'requestBy': 'test@test.ca', - 'content': { - 'subject': 'test', - 'body': 'test', - 'attachments': [] - }}), - ({ - 'recipients': '', - 'requestBy': 'test@test.ca', - 'content': {}}), - ({ - 'recipients': '', - 'requestBy': 'test@test.ca', - 'content': { - 'subject': 'test', - 'body': {}, - 'attachments': [] - }}), - ({ - 'requestBy': 'test@test.ca', - 'content': { - 'subject': 'test', - 'body': 'test', - 'attachments': [] - }}), - ({ - 'recipients': 'test@test.ca', - 'requestBy': 'test@test.ca'}), - ({ - 'recipients': 'test@test.ca', - 'requestBy': 'test@test.ca', - 'content': { - 'subject': 'test', - 'attachments': [] - }}) -]) +@pytest.mark.parametrize( + "email_msg", + [ + ({}), + ( + { + "recipients": "", + "requestBy": "test@test.ca", + "content": {"subject": "test", "body": "test", "attachments": []}, + } + ), + ({"recipients": "", "requestBy": "test@test.ca", "content": {}}), + ( + { + "recipients": "", + "requestBy": "test@test.ca", + "content": {"subject": "test", "body": {}, "attachments": []}, + } + ), + ({"requestBy": "test@test.ca", "content": {"subject": "test", "body": "test", "attachments": []}}), + ({"recipients": "test@test.ca", "requestBy": "test@test.ca"}), + ( + { + "recipients": "test@test.ca", + "requestBy": "test@test.ca", + "content": {"subject": "test", "attachments": []}, + } + ), + ], +) def test_send_email_with_incomplete_payload(app, session, client, email_msg): """Assert that the email not have body can not be processed.""" # Setup @@ -631,9 +649,7 @@ def helper_create_cloud_event_envelope( } } if not ce: - ce = SimpleCloudEvent( - 
id=cloud_event_id, source=source, subject=subject, type=type, data=data - ) + ce = SimpleCloudEvent(id=cloud_event_id, source=source, subject=subject, type=type, data=data) # # This needs to mimic the envelope created by GCP PubSb when call a resource # diff --git a/queue_services/entity-filer/__init__.py b/queue_services/entity-filer/__init__.py new file mode 100644 index 0000000000..c09781299b --- /dev/null +++ b/queue_services/entity-filer/__init__.py @@ -0,0 +1,15 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Init.""" +# pylint: disable=invalid-name diff --git a/queue_services/entity-filer/poetry.lock b/queue_services/entity-filer/poetry.lock index 79045ce3d8..afaa13e483 100644 --- a/queue_services/entity-filer/poetry.lock +++ b/queue_services/entity-filer/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alembic" @@ -19,63 +19,6 @@ typing-extensions = ">=4" [package.extras] tz = ["python-dateutil"] -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - [[package]] name = "arrow" version = "1.3.0" @@ -106,6 +49,17 @@ files = [ {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -141,31 +95,58 @@ setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "bandit" +version = "1.7.7" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash 
= "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -177,7 +158,7 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -260,70 +241,6 @@ files = [ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - [[package]] name = "charset-normalizer" version = "3.3.2" @@ -448,6 +365,70 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = 
"coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "datedelta" version = "1.4" @@ -459,6 +440,20 @@ files = [ {file = "datedelta-1.4.tar.gz", hash = 
"sha256:3f1ef319ead642a76a3cab731917bf14a0ced0d91943f33ff57ae615837cab97"}, ] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "dpath" version = "2.1.6" @@ -470,6 +465,38 @@ files = [ {file = "dpath-2.1.6.tar.gz", hash = "sha256:f1e07c72e8605c6a9e80b64bc8f42714de08a789c7de417e49c3f87a19692e47"}, ] +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "flask" version = "2.3.3" @@ -876,6 +903,20 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -977,6 +1018,30 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -1046,6 +1111,28 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1079,6 +1166,17 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + [[package]] name = "pg8000" version = "1.30.3" @@ -1186,6 +1284,17 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + [[package]] name = "pycountry" version = "22.3.5" @@ -1199,6 +1308,59 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = 
"sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pytest" version = "7.4.3" @@ -1237,6 +1399,24 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "pytest-mock" version = "3.12.0" @@ -1293,6 +1473,66 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "referencing" version = "0.30.2" @@ -1394,6 +1634,24 @@ files = [ {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, ] +[[package]] +name = "rich" +version = "13.7.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rpds-py" version = "0.12.0" @@ -1723,6 +1981,20 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + [[package]] name = "strict-rfc3339" version = "0.7" @@ -1733,6 +2005,17 @@ files = [ {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, ] +[[package]] +name = "tomlkit" +version = "0.12.3" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, +] + [[package]] name = 
"types-python-dateutil" version = "2.8.19.14" @@ -1820,4 +2103,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "4856fed227fc89b6edc7ede707ca2823dc1c80ad4ccef63e81b2b551fc1da41a" +content-hash = "ca49998a51db9743ea622d92335cc718487a283d7fa3af97ec13c3250a79376e" diff --git a/queue_services/entity-filer/pyproject.toml b/queue_services/entity-filer/pyproject.toml index 5bba16e024..4fe49555a5 100644 --- a/queue_services/entity-filer/pyproject.toml +++ b/queue_services/entity-filer/pyproject.toml @@ -33,15 +33,137 @@ gunicorn = "^21.2.0" [tool.poetry.group.dev.dependencies] -black = "^23.7.0" flask-migrate = "^4.0.4" freezegun = "^1.2.2" pytest = "^7.4.0" pytest-asyncio = "^0.21.1" pytest-mock = "^3.11.1" +pytest-cov = "^4.0.0" requests = "^2.31.0" requests-mock = "^1.11.0" +black = "^23.12.1" +pylint = "^3.0.3" +bandit = "^1.7.6" +flake8-pyproject = "^1.2.3" +isort = "^5.13.2" +[tool.bandit] +exclude_dirs = [".venv","tests"] +skips = ["B104"] + +[tool.flake8] +ignore = ["F401","E402", "Q000", "E203", "W503"] +exclude = [ + ".venv", + ".git", + ".history", + "devops", + "*migrations*", +] +per-file-ignores = [ + "__init__.py:F401", + "*.py:B902" +] +max-line-length = 120 +docstring-min-length=10 +count = true + +[tool.black] +target-version = ["py310", "py311", "py312"] +line-length = 120 +include = '\.pyi?$' +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. + migrations + | devops +)/ +''' + +[tool.isort] +atomic = true +profile = "black" +line_length = 120 +skip_gitignore = true +skip_glob = ["migrations", "devops"] + +[tool.pylint.main] +fail-under = 10 +max-line-length = 120 +ignore = [ "migrations", "devops", "tests"] +ignore-patterns = ["^\\.#"] +ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] +ignored-classes= "scoped_session" +ignore-long-lines = "^\\s*(# )??$" +extension-pkg-whitelist = "pydantic" +notes = ["FIXME","XXX","TODO"] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = "C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +function-naming-style = "snake_case" +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "any" +variable-naming-style = "snake_case" +docstring-min-length = -1 +good-names = ["i", "j", "k", "ex", "Run", "_"] +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pytest.ini_options] +minversion = "2.0" +testpaths = [ + "tests", +] +addopts = "--verbose --strict -p no:warnings --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml" +python_files = [ + "test*.py" +] +norecursedirs = [ + ".git", ".tox", "venv*", "requirements*", "build", +] +log_cli = true +log_cli_level = "1" +filterwarnings = [ + "ignore::UserWarning" +] +markers = [ + "slow", + "serial", +] + +[tool.coverage.run] +branch = true +source = [ + 
"src/entity-filer", +] +omit = [ + "wsgi.py", + "gunicorn_config.py" +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "from", + "import", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + 'if __name__ == "__main__":', +] ## [[tool.poetry.source]] ## name = "local-pypi" diff --git a/queue_services/entity-filer/src/entity_filer/__init__.py b/queue_services/entity-filer/src/entity_filer/__init__.py index d0f769abf4..68b80d7b21 100644 --- a/queue_services/entity-filer/src/entity_filer/__init__.py +++ b/queue_services/entity-filer/src/entity_filer/__init__.py @@ -36,17 +36,16 @@ This module is the service worker for applying filings to the Business Database structure. """ import sentry_sdk -from flask import Flask from business_model import db +from flask import Flask -# from sentry_sdk.integrations.flask import FlaskIntegration - -from .config import Config -from .config import Production +from .config import Config, Production from .resources import register_endpoints from .services import queue from .translations import babel +# from sentry_sdk.integrations.flask import FlaskIntegration + def create_app(environment: Config = Production, **kwargs) -> Flask: """Return a configured Flask App using the Factory method.""" diff --git a/queue_services/entity-filer/src/entity_filer/common/enum.py b/queue_services/entity-filer/src/entity_filer/common/enum.py index 051a27edd8..7e370f34ba 100644 --- a/queue_services/entity-filer/src/entity_filer/common/enum.py +++ b/queue_services/entity-filer/src/entity_filer/common/enum.py @@ -1,7 +1,6 @@ """Enum Utilities.""" from enum import auto # noqa: F401 -from enum import Enum -from enum import EnumMeta +from enum import Enum, EnumMeta from typing import Optional @@ -11,6 +10,7 @@ class BaseMeta(EnumMeta): def __contains__(self, other): """Return True if 'in' the Enum.""" try: + # pylint: disable-next=no-value-for-parameter self(other) except ValueError: return False @@ -37,6 +37,7 @@ def get_enum_by_name(cls, value: str) -> Optional[str]: return enum_value return None + # pylint: disable-next=no-self-argument def _generate_next_value_(name, start, count, last_values): """Return the name of the key.""" return name diff --git a/queue_services/entity-filer/src/entity_filer/config.py b/queue_services/entity-filer/src/entity_filer/config.py index 48a3ac202d..0a2b441b9d 100644 --- a/queue_services/entity-filer/src/entity_filer/config.py +++ b/queue_services/entity-filer/src/entity_filer/config.py @@ -43,7 +43,6 @@ from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) @@ -68,11 +67,11 @@ class Config: # pylint: disable=too-few-public-methods # POSTGRESQL if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): - SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" - else: SQLALCHEMY_DATABASE_URI = ( - f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" # API Endpoints AUTH_API_URL = os.getenv("AUTH_API_URL", "") diff --git a/queue_services/entity-filer/src/entity_filer/exceptions/__init__.py 
b/queue_services/entity-filer/src/entity_filer/exceptions/__init__.py index 5be0d2d8ca..1dbeaaac74 100644 --- a/queue_services/entity-filer/src/entity_filer/exceptions/__init__.py +++ b/queue_services/entity-filer/src/entity_filer/exceptions/__init__.py @@ -24,7 +24,6 @@ from .default_exception import DefaultException from .error_messages import ErrorCode, get_error_message - __all__ = ( "ApiConnectionException", "BusinessException", diff --git a/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/__init__.py b/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/__init__.py index f1f9ca1b8c..f0a83ff3fd 100644 --- a/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/__init__.py +++ b/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/__init__.py @@ -20,7 +20,6 @@ from .codes import ErrorCode from .utils import get_error_message - __all__ = ( "ErrorCode", "get_error_message", diff --git a/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/codes.py b/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/codes.py index 9f34383afb..322a6c04d6 100644 --- a/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/codes.py +++ b/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/codes.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. """Application Common Error Messages.""" -from entity_filer.common.enum import BaseEnum -from entity_filer.common.enum import auto +from entity_filer.common.enum import BaseEnum, auto class ErrorCode(BaseEnum): diff --git a/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/messages.py b/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/messages.py index bdcd02da28..7fb703a525 100644 --- a/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/messages.py +++ b/queue_services/entity-filer/src/entity_filer/exceptions/error_messages/messages.py @@ -14,7 +14,6 @@ """Application Common Error Messages.""" from .codes import ErrorCode - ERROR_MESSAGES: dict = { ErrorCode.FILING_NOT_FOUND: "Filing: {filing_id} not found for: {identifier}", ErrorCode.GENERAL_UNRECOVERABLE_ERROR: "Unrecoverable error for Filing: {filing_id}", diff --git a/queue_services/entity-filer/src/entity_filer/filing_meta.py b/queue_services/entity-filer/src/entity_filer/filing_meta.py index 72c1886cb6..4bb04d86e5 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_meta.py +++ b/queue_services/entity-filer/src/entity_filer/filing_meta.py @@ -52,10 +52,7 @@ def to_camel(string: str) -> Optional[str]: if not isinstance(string, str): return None - return "".join( - word.lower() if idx == 0 else word.capitalize() - for idx, word in enumerate(string.split("_")) - ) + return "".join(word.lower() if idx == 0 else word.capitalize() for idx, word in enumerate(string.split("_"))) def to_snake(string: str) -> Optional[str]: diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/admin_freeze.py b/queue_services/entity-filer/src/entity_filer/filing_processors/admin_freeze.py index 45a2a3891a..149f4c6b9b 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/admin_freeze.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/admin_freeze.py @@ -16,16 +16,13 @@ from typing import Dict import dpath +from business_model import Filing, LegalEntity -# from 
entity_filer.exceptions import DefaultException, logger -from business_model import LegalEntity, Filing - +from entity_filer.exceptions import DefaultException, logger from entity_filer.filing_meta import FilingMeta -def process( - business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta -): +def process(business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): """Render the admin freeze filing unto the model objects.""" if not (admin_freeze_filing := filing.get("adminFreeze")): print("Could not find adminFreeze in: %s", filing) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/agm_extension.py b/queue_services/entity-filer/src/entity_filer/filing_processors/agm_extension.py index a49c70dd6c..166ec8dbbf 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/agm_extension.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/agm_extension.py @@ -28,32 +28,20 @@ def process(filing: Dict, filing_meta: FilingMeta): "extReqForAgmYear": filing["agmExtension"]["extReqForAgmYear"], "totalApprovedExt": filing["agmExtension"]["totalApprovedExt"], "extensionDuration": filing["agmExtension"]["extensionDuration"], - "isFinalExtension": _check_final_extension(filing) + "isFinalExtension": _check_final_extension(filing), } if prev_agm_ref_date := dpath.util.get(filing, "/agmExtension/prevAgmRefDate", default=None): - filing_meta.agm_extension = { - **filing_meta.agm_extension, - "prevAgmRefDate": prev_agm_ref_date - } + filing_meta.agm_extension = {**filing_meta.agm_extension, "prevAgmRefDate": prev_agm_ref_date} if curr_ext_expiry_date := dpath.util.get(filing, "/agmExtension/expireDateCurrExt", default=None): - filing_meta.agm_extension = { - **filing_meta.agm_extension, - "expireDateCurrExt": curr_ext_expiry_date - } + filing_meta.agm_extension = {**filing_meta.agm_extension, "expireDateCurrExt": curr_ext_expiry_date} if intended_agm_date := dpath.util.get(filing, "/agmExtension/intendedAgmDate", default=None): - filing_meta.agm_extension = { - **filing_meta.agm_extension, - "intendedAgmDate": intended_agm_date - } + filing_meta.agm_extension = {**filing_meta.agm_extension, "intendedAgmDate": intended_agm_date} if expiry_date_approved_ext := dpath.util.get(filing, "/agmExtension/expireDateApprovedExt", default=None): - filing_meta.agm_extension = { - **filing_meta.agm_extension, - "expireDateApprovedExt": expiry_date_approved_ext - } + filing_meta.agm_extension = {**filing_meta.agm_extension, "expireDateApprovedExt": expiry_date_approved_ext} def _check_final_extension(filing: Dict) -> bool: diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/agm_location_change.py b/queue_services/entity-filer/src/entity_filer/filing_processors/agm_location_change.py index 455e0c3904..4e1f1038f9 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/agm_location_change.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/agm_location_change.py @@ -23,5 +23,5 @@ def process(filing: Dict, filing_meta: FilingMeta): filing_meta.agm_location_change = { "year": filing["agmLocationChange"]["year"], "agmLocation": filing["agmLocationChange"]["agmLocation"], - "reason": filing["agmLocationChange"]["reason"] + "reason": filing["agmLocationChange"]["reason"], } diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/alteration.py b/queue_services/entity-filer/src/entity_filer/filing_processors/alteration.py index 
3bc89f977e..abc517d64f 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/alteration.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/alteration.py @@ -17,7 +17,7 @@ import dpath import sentry_sdk -from business_model import LegalEntity, Filing +from business_model import Filing, LegalEntity from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import ( @@ -105,9 +105,7 @@ def process( rules_file_key = dpath.util.get(filing, "/alteration/rulesFileKey") rules_file_name = dpath.util.get(filing, "/alteration/rulesFileName") if rules_file_key: - rules_and_memorandum.update_rules( - business, filing_submission, rules_file_key, rules_file_name - ) + rules_and_memorandum.update_rules(business, filing_submission, rules_file_key, rules_file_name) filing_meta.alteration = { **filing_meta.alteration, **{"uploadNewRules": True}, @@ -115,9 +113,7 @@ def process( with suppress(IndexError, KeyError, TypeError): memorandum_file_key = dpath.util.get(filing, "/alteration/memorandumFileKey") - rules_and_memorandum.update_memorandum( - business, filing_submission, memorandum_file_key - ) + rules_and_memorandum.update_memorandum(business, filing_submission, memorandum_file_key) def post_process(business: LegalEntity, filing: Filing, correction: bool = False): diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/amalgamation_application.py b/queue_services/entity-filer/src/entity_filer/filing_processors/amalgamation_application.py index 1a5e3dbce9..5c48d1b7f1 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/amalgamation_application.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/amalgamation_application.py @@ -18,17 +18,16 @@ from typing import Dict import sentry_sdk +from business_model import AmalgamatingBusiness, Amalgamation, Document, Filing, LegalEntity, RegistrationBootstrap, db -# from entity_filer.exceptions import DefaultException -from business_model import db, AmalgamatingBusiness, Amalgamation, LegalEntity, Document, Filing, RegistrationBootstrap - -# from legal_api.services.bootstrap import AccountService - +from entity_filer.exceptions import DefaultException from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import aliases, filings, legal_entity_info, shares from entity_filer.filing_processors.filing_components.offices import update_offices from entity_filer.filing_processors.filing_components.parties import merge_all_parties +# from legal_api.services.bootstrap import AccountService + def update_affiliation(business: LegalEntity, filing: Filing): """Create an affiliation for the business and remove the bootstrap.""" @@ -87,8 +86,9 @@ def create_amalgamating_businesses(amalgamation_filing: Dict, amalgamation: Amal for amalgamating_business_json in amalgamating_businesses_json: amalgamating_business = AmalgamatingBusiness() amalgamating_business.role = amalgamating_business_json.get("role") - if ((identifier := amalgamating_business_json.get("identifier")) and - (business := LegalEntity.find_by_identifier(identifier))): + if (identifier := amalgamating_business_json.get("identifier")) and ( + business := LegalEntity.find_by_identifier(identifier) + ): amalgamating_business.legal_entity_id = business.id dissolve_amalgamating_business(business, filing_rec) else: @@ -111,10 +111,12 @@ def dissolve_amalgamating_business(business: LegalEntity, filing_rec: Filing): 
db.session.add(business) -def process(business: LegalEntity, # pylint: disable=too-many-branches - filing: Dict, - filing_rec: Filing, - filing_meta: FilingMeta): # pylint: disable=too-many-branches +def process( + business: LegalEntity, # pylint: disable=too-many-branches + filing: Dict, + filing_rec: Filing, + filing_meta: FilingMeta, +): # pylint: disable=too-many-branches """Process the incoming amalgamation application filing.""" # Extract the filing information for amalgamation amalgamation_filing = filing.get("filing", {}).get("amalgamationApplication") @@ -123,10 +125,12 @@ def process(business: LegalEntity, # pylint: disable=too-many-branches if not amalgamation_filing: raise DefaultException( - f"AmalgamationApplication legal_filing:amalgamationApplication missing from {filing_rec.id}") + f"AmalgamationApplication legal_filing:amalgamationApplication missing from {filing_rec.id}" + ) if business: raise DefaultException( - f"LegalEntity Already Exist: AmalgamationApplication legal_filing:amalgamationApplication {filing_rec.id}") + f"LegalEntity Already Exist: AmalgamationApplication legal_filing:amalgamationApplication {filing_rec.id}" + ) business_info_obj = amalgamation_filing.get("nameRequest") @@ -134,13 +138,12 @@ def process(business: LegalEntity, # pylint: disable=too-many-branches corp_num = legal_entity_info.get_next_corp_num(business_info_obj["legalType"]) if not corp_num: raise DefaultException( - f"amalgamationApplication {filing_rec.id} unable to get a business amalgamationApplication number.") + f"amalgamationApplication {filing_rec.id} unable to get a business amalgamationApplication number." + ) # Initial insert of the business record business = LegalEntity() - business = legal_entity_info.update_legal_entity_info( - corp_num, business, business_info_obj, filing_rec - ) + business = legal_entity_info.update_legal_entity_info(corp_num, business, business_info_obj, filing_rec) business.state = LegalEntity.State.ACTIVE amalgamation.filing_id = filing_rec.id @@ -151,9 +154,10 @@ def process(business: LegalEntity, # pylint: disable=too-many-branches business.amalgamation.append(amalgamation) if nr_number := business_info_obj.get("nrNumber", None): - filing_meta.amalgamation_application = {**filing_meta.amalgamation_application, - **{"nrNumber": nr_number, - "legalName": business_info_obj.get("legalName", None)}} + filing_meta.amalgamation_application = { + **filing_meta.amalgamation_application, + **{"nrNumber": nr_number, "legalName": business_info_obj.get("legalName", None)}, + } if not business: raise DefaultException(f"amalgamationApplication {filing_rec.id}, Unable to create business.") diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/annual_report.py b/queue_services/entity-filer/src/entity_filer/filing_processors/annual_report.py index d3b9392cda..46ab7a5124 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/annual_report.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/annual_report.py @@ -20,10 +20,10 @@ # from entity_queue_common.service_utils import logger from business_model import LegalEntity -# from legal_api.services.filings import validations - from entity_filer.filing_meta import FilingMeta +# from legal_api.services.filings import validations + def process(legal_entity: LegalEntity, filing: Dict, filing_meta: FilingMeta): """Render the annual_report onto the legal_entity model objects.""" @@ -35,9 +35,7 @@ def process(legal_entity: LegalEntity, filing: Dict, filing_meta: 
FilingMeta): ar_date = datetime.date.fromisoformat(ar_date) else: # should never get here (schema validation should prevent this from making it to the filer) - print( - "No annualReportDate given for in annual report. Filing id: %s", filing.id - ) + print("No annualReportDate given for in annual report. Filing id: %s", filing.id) legal_entity.last_ar_date = ar_date # Validations are on input @@ -52,9 +50,7 @@ def process(legal_entity: LegalEntity, filing: Dict, filing_meta: FilingMeta): legal_entity.last_ar_date = agm_date legal_entity.last_ar_year = ( - legal_entity.last_ar_year + 1 - if legal_entity.last_ar_year - else legal_entity.founding_date.year + 1 + legal_entity.last_ar_year + 1 if legal_entity.last_ar_year else legal_entity.founding_date.year + 1 ) # save the annual report date to the filing meta info diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_address.py b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_address.py index d056a39a74..786735712e 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_address.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_address.py @@ -17,10 +17,7 @@ from business_model import LegalEntity from entity_filer.filing_meta import FilingMeta -from entity_filer.filing_processors.filing_components import ( - create_address, - update_address, -) +from entity_filer.filing_processors.filing_components import create_address, update_address def process(business: LegalEntity, filing: Dict, filing_meta: FilingMeta): diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_directors.py b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_directors.py index a1aaf3ffe9..52c0a958f1 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_directors.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_directors.py @@ -15,20 +15,14 @@ from datetime import datetime from typing import Dict -# from entity_filer.exceptions import DefaultException, logger from business_model import LegalEntity, PartyRole +from entity_filer.exceptions import DefaultException, logger from entity_filer.filing_meta import FilingMeta -from entity_filer.filing_processors.filing_components import ( - merge_party, - create_role, - update_director, -) +from entity_filer.filing_processors.filing_components import create_role, merge_party, update_director -def process( - business: LegalEntity, filing: Dict, filing_meta: FilingMeta -): # pylint: disable=too-many-branches; +def process(business: LegalEntity, filing: Dict, filing_meta: FilingMeta): # pylint: disable=too-many-branches; """Render the change_of_directors onto the business model objects.""" if not (new_directors := filing["changeOfDirectors"].get("directors")): return @@ -47,21 +41,14 @@ def process( ) new_director_names.append(current_new_director_name.upper()) - for director in PartyRole.get_parties_by_role( - business.id, PartyRole.RoleTypes.DIRECTOR.value - ): + for director in PartyRole.get_parties_by_role(business.id, PartyRole.RoleTypes.DIRECTOR.value): existing_director_name = ( - director.party.first_name - + director.party.middle_initial - + director.party.last_name + director.party.first_name + director.party.middle_initial + director.party.last_name ) if existing_director_name.upper() == current_new_director_name.upper(): # Creates a new director record in Lear if a matching ceased director 
exists in Lear # and the colin json contains the same director record with cessation date null. - if ( - director.cessation_date is not None - and new_director.get("cessationDate") is None - ): + if director.cessation_date is not None and new_director.get("cessationDate") is None: director_found = False else: director_found = True @@ -76,7 +63,7 @@ def process( if "appointed" in new_director["actions"]: # add new diretor party role to the business - party = merge_party(business_id=business.id, party_info=new_director) + party = merge_party(legal_entity_id=business.id, party_info=new_director) role = { "roleType": "Director", "appointmentDate": new_director.get("appointmentDate"), @@ -85,9 +72,7 @@ def process( new_director_role = create_role(party=party, role_info=role) business.party_roles.append(new_director_role) - if any( - [action != "appointed" for action in new_director["actions"]] - ): # pylint: disable=use-a-generator + if any([action != "appointed" for action in new_director["actions"]]): # pylint: disable=use-a-generator # get name of director in json for comparison * new_director_name = ( new_director["officer"].get("firstName") @@ -102,35 +87,17 @@ def process( print("Could not resolve director name from json %s.", new_director) raise DefaultException - for director in PartyRole.get_parties_by_role( - business.id, PartyRole.RoleTypes.DIRECTOR.value - ): + for director in PartyRole.get_parties_by_role(business.id, PartyRole.RoleTypes.DIRECTOR.value): # get name of director in database for comparison * - director_name = ( - director.party.first_name - + director.party.middle_initial - + director.party.last_name - ) + director_name = director.party.first_name + director.party.middle_initial + director.party.last_name # Update only an active director - if ( - director_name.upper() == new_director_name.upper() - and director.cessation_date is None - ): + if director_name.upper() == new_director_name.upper() and director.cessation_date is None: update_director(director=director, new_info=new_director) break if filing.get("colinIds"): - for director in PartyRole.get_parties_by_role( - business.id, PartyRole.RoleTypes.DIRECTOR.value - ): + for director in PartyRole.get_parties_by_role(business.id, PartyRole.RoleTypes.DIRECTOR.value): # get name of director in database for comparison * - director_name = ( - director.party.first_name - + director.party.middle_initial - + director.party.last_name - ) - if ( - director_name.upper() not in new_director_names - and director.cessation_date is None - ): + director_name = director.party.first_name + director.party.middle_initial + director.party.last_name + if director_name.upper() not in new_director_names and director.cessation_date is None: director.cessation_date = datetime.utcnow() diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_name.py b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_name.py index d4e6bc28b6..59b269a8ec 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_name.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_name.py @@ -15,7 +15,7 @@ from typing import Dict # from entity_queue_common.service_utils import logger -from business_model import LegalEntity, Filing +from business_model import Filing, LegalEntity from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import name_request diff --git 
a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_registration.py b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_registration.py index e159a16449..420c65bad3 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_registration.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/change_of_registration.py @@ -17,22 +17,20 @@ from typing import Dict import dpath -from business_model import db, Address, AlternateName, LegalEntity, Filing -from entity_filer.exceptions.default_exception import DefaultException +from business_model import Address, AlternateName, Filing, LegalEntity, db +from entity_filer.exceptions.default_exception import DefaultException from entity_filer.filing_meta import FilingMeta -from entity_filer.filing_processors.filing_components import ( - filings, - legal_entity_info, - name_request, - update_address, +from entity_filer.filing_processors.filing_components import filings, legal_entity_info, name_request, update_address +from entity_filer.filing_processors.filing_components.alternate_name import ( + update_partner_change, + update_proprietor_change, ) from entity_filer.filing_processors.filing_components.parties import get_or_create_party, merge_all_parties from entity_filer.filing_processors.registration import get_partnership_name -from entity_filer.filing_processors.filing_components.alternate_name import update_partner_change, update_proprietor_change -def process( +def process( legal_entity: LegalEntity, change_filing_rec: Filing, change_filing: Dict, @@ -47,22 +45,19 @@ def process( filing_type="changeOfRegistration", change_filing_rec=change_filing_rec, change_filing=change_filing, - filing_meta=filing_meta.change_of_registration + filing_meta=filing_meta.change_of_registration, ) - case _: # LegalEntity.EntityTypes.SOLE_PROP: # legal_entity might be a proprietor? + case _: # LegalEntity.EntityTypes.SOLE_PROP: # legal_entity might be a proprietor? 
update_proprietor_change( - legal_entity=legal_entity, filing_type="changeOfRegistration", change_filing_rec=change_filing_rec, change_filing=change_filing, - filing_meta=filing_meta.change_of_registration + filing_meta=filing_meta.change_of_registration, ) - + # Update business office if present with suppress(IndexError, KeyError, TypeError): - business_office_json = dpath.util.get( - change_filing, "/changeOfRegistration/offices/businessOffice" - ) + business_office_json = dpath.util.get(change_filing, "/changeOfRegistration/offices/businessOffice") for updated_address in business_office_json.values(): if updated_address.get("id", None): address = Address.find_by_id(updated_address.get("id")) @@ -76,9 +71,7 @@ def process( # update court order, if any is present with suppress(IndexError, KeyError, TypeError): - court_order_json = dpath.util.get( - change_filing, "/changeOfRegistration/courtOrder" - ) + court_order_json = dpath.util.get(change_filing, "/changeOfRegistration/courtOrder") filings.update_filing_court_order(change_filing_rec, court_order_json) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py b/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py index dc91f227d8..7f791327ca 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py @@ -17,22 +17,19 @@ import datedelta import dpath -from business_model import LegalEntity, ConsentContinuationOut, Filing -from ..utils.legislation_datetime import LegislationDatetime +from business_model import ConsentContinuationOut, Filing, LegalEntity from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import filings +from ..utils.legislation_datetime import LegislationDatetime + -def process( - legal_entity: LegalEntity, cco_filing: Filing, filing: Dict, filing_meta: FilingMeta -): +def process(legal_entity: LegalEntity, cco_filing: Filing, filing: Dict, filing_meta: FilingMeta): """Render the consent continuation out filing into the legal_entity model objects.""" # update consent continuation out, if any is present with suppress(IndexError, KeyError, TypeError): - consent_continuation_out_json = dpath.util.get( - filing, "/consentContinuationOut/courtOrder" - ) + consent_continuation_out_json = dpath.util.get(filing, "/consentContinuationOut/courtOrder") filings.update_filing_court_order(cco_filing, consent_continuation_out_json) cco_filing.order_details = filing["consentContinuationOut"].get("details") diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py index a6fe1c6fd7..e68b7fff7b 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py @@ -16,12 +16,13 @@ from typing import Dict import dpath -from business_model import LegalEntity, Comment, Filing -from ..utils.legislation_datetime import LegislationDatetime +from business_model import Comment, Filing, LegalEntity from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import filings +from ..utils.legislation_datetime import LegislationDatetime + def process( legal_entity: LegalEntity, @@ -40,17 +41,11 @@ def 
process( details = continuation_out_json.get("details") legal_name = continuation_out_json.get("legalName") continuation_out_date_str = continuation_out_json.get("continuationOutDate") - continuation_out_date = ( - LegislationDatetime.as_utc_timezone_from_legislation_date_str( - continuation_out_date_str - ) - ) + continuation_out_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(continuation_out_date_str) foreign_jurisdiction = continuation_out_json.get("foreignJurisdiction") foreign_jurisdiction_country = foreign_jurisdiction.get("country").upper() - continuation_out_filing.comments.append( - Comment(comment=details, staff_id=continuation_out_filing.submitter_id) - ) + continuation_out_filing.comments.append(Comment(comment=details, staff_id=continuation_out_filing.submitter_id)) legal_entity.state = LegalEntity.State.HISTORICAL legal_entity.state_filing_id = continuation_out_filing.id @@ -62,9 +57,7 @@ def process( with suppress(IndexError, KeyError, TypeError): foreign_jurisdiction_region = foreign_jurisdiction.get("region") - foreign_jurisdiction_region = ( - foreign_jurisdiction_region.upper() if foreign_jurisdiction_region else None - ) + foreign_jurisdiction_region = foreign_jurisdiction_region.upper() if foreign_jurisdiction_region else None legal_entity.foreign_jurisdiction_region = foreign_jurisdiction_region filing_meta.continuation_out = {} diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/conversion.py b/queue_services/entity-filer/src/entity_filer/filing_processors/conversion.py index 78bedaa5ea..240e860c4e 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/conversion.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/conversion.py @@ -23,26 +23,24 @@ """ # pylint: disable=superfluous-parens; as pylance requires it from contextlib import suppress +from http import HTTPStatus from typing import Dict import dpath # from entity_queue_common.service_utils import BusinessException -from business_model import LegalEntity, Filing -from entity_filer.utils.legislation_datetime import LegislationDatetime +from business_model import Filing, LegalEntity from entity_filer.exceptions import BusinessException, DefaultException from entity_filer.filing_meta import FilingMeta - -from entity_filer.filing_processors.filing_components import ( - aliases, - legal_entity_info, - name_request, - shares, +from entity_filer.filing_processors.filing_components import aliases, legal_entity_info, name_request, shares +from entity_filer.filing_processors.filing_components.alternate_name import ( + update_partner_change, + update_proprietor_change, ) from entity_filer.filing_processors.filing_components.offices import update_offices from entity_filer.filing_processors.filing_components.parties import merge_all_parties -from entity_filer.filing_processors.filing_components.alternate_name import update_partner_change, update_proprietor_change +from entity_filer.utils.legislation_datetime import LegislationDatetime def process( @@ -56,22 +54,21 @@ def process( filing_meta.conversion = {} if not (conversion_filing := filing.get("filing", {}).get("conversion")): raise BusinessException( - f"CONVL legal_filing:conversion missing from {filing_rec.id}" + error=f"CONVL legal_filing:conversion missing from {filing_rec.id}", status_code=HTTPStatus.BAD_REQUEST ) # if legal_entity and legal_entity.entity_type in ['SP', 'GP']: if filing["filing"]["business"]["legalType"] in ["SP", "GP"]: - if legal_entity and not 
legal_entity.entity_type in [ - LegalEntity.EntityTypes.PERSON, - LegalEntity.EntityTypes.PARTNERSHIP, - ]: - raise DefaultException( - f"Filing business type and entity don't match, filing{filing_rec.id}" - ) + if legal_entity and not ( + legal_entity.entity_type + in [ + LegalEntity.EntityTypes.PERSON, + LegalEntity.EntityTypes.PARTNERSHIP, + ] + ): + raise DefaultException(f"Filing business type and entity don't match, filing{filing_rec.id}") _process_firms_conversion(legal_entity, filing, filing_rec, filing_meta) else: - legal_entity = _process_corps_conversion( - legal_entity, conversion_filing, filing, filing_rec - ) + legal_entity = _process_corps_conversion(legal_entity, conversion_filing, filing, filing_rec) return legal_entity, filing_rec @@ -79,11 +76,12 @@ def process( def _process_corps_conversion(legal_entity, conversion_filing, filing, filing_rec): if legal_entity: raise BusinessException( - f"Business Already Exist: CONVL legal_filing:conversion {filing_rec.id}" + error=f"Business Already Exist: CONVL legal_filing:conversion {filing_rec.id}", + status_code=HTTPStatus.FORBIDDEN, ) if not (corp_num := filing.get("filing", {}).get("business", {}).get("identifier")): raise BusinessException( - f"conversion {filing_rec.id} missing the legal_entity identifier." + error=f"conversion {filing_rec.id} missing the legal_entity identifier.", status_code=HTTPStatus.BAD_REQUEST ) # Initial insert of the legal_entity record legal_entity_info_obj = conversion_filing.get("nameRequest") @@ -93,7 +91,8 @@ def _process_corps_conversion(legal_entity, conversion_filing, filing, filing_re ) ): raise BusinessException( - f"CONVL conversion {filing_rec.id}, Unable to create legal_entity." + error=f"CONVL conversion {filing_rec.id}, Unable to create legal_entity.", + status_code=HTTPStatus.UNPROCESSABLE_ENTITY, ) if offices := conversion_filing.get("offices"): update_offices(legal_entity, offices) @@ -119,14 +118,14 @@ def _process_firms_conversion( filing_type="conversion", change_filing_rec=filing_rec, change_filing=conversion_filing, - filing_meta=filing_meta.conversion + filing_meta=filing_meta.conversion, ) - case _: # LegalEntity.EntityTypes.PERSON: # legal_entity might be a proprietor? + case _: # LegalEntity.EntityTypes.PERSON: # legal_entity might be a proprietor? 
update_proprietor_change( filing_type="conversion", change_filing_rec=filing_rec, change_filing=conversion_filing, - filing_meta=filing_meta.conversion + filing_meta=filing_meta.conversion, ) # Update legal_entity office if present @@ -141,14 +140,10 @@ def _process_firms_conversion( # update legal_entity start date, if any is present with suppress(IndexError, KeyError, TypeError): - legal_entity_start_date = dpath.util.get( - conversion_filing, "/filing/conversion/startDate" - ) + legal_entity_start_date = dpath.util.get(conversion_filing, "/filing/conversion/startDate") if legal_entity_start_date: - legal_entity.start_date = ( - LegislationDatetime.as_utc_timezone_from_legislation_date_str( - legal_entity_start_date - ) + legal_entity.start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str( + legal_entity_start_date ) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/correction.py b/queue_services/entity-filer/src/entity_filer/filing_processors/correction.py index 36aff4b03f..8fd928a73c 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/correction.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/correction.py @@ -17,14 +17,11 @@ import pytz import sentry_sdk - -from business_model import LegalEntity, Comment, Filing +from business_model import Comment, Filing, LegalEntity from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import name_request -from entity_filer.filing_processors.filing_components.correction import ( - correct_business_data, -) +from entity_filer.filing_processors.filing_components.correction import correct_business_data def process( @@ -59,17 +56,12 @@ def process( ) corrected_filing_type = filing["correction"]["correctedFilingType"] - if ( - business.entity_type in ["SP", "GP", "BC", "BEN", "CC", "ULC", "CP"] - and corrected_filing_type != "conversion" - ): + if business.entity_type in ["SP", "GP", "BC", "BEN", "CC", "ULC", "CP"] and corrected_filing_type != "conversion": correct_business_data(business, correction_filing, filing, filing_meta) else: # set correction filing to PENDING_CORRECTION, for manual intervention # - include flag so that listener in Filing model does not change state automatically to COMPLETE - correction_filing._status = ( - Filing.Status.PENDING_CORRECTION.value - ) # pylint: disable=protected-access + correction_filing._status = Filing.Status.PENDING_CORRECTION.value # pylint: disable=protected-access setattr(correction_filing, "skip_status_listener", True) original_filing.save_to_session() diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/court_order.py b/queue_services/entity-filer/src/entity_filer/filing_processors/court_order.py index 265fde9a4c..ab5326b99e 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/court_order.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/court_order.py @@ -16,7 +16,7 @@ from datetime import datetime from typing import Dict -from business_model import LegalEntity, Document, DocumentType, Filing +from business_model import Document, DocumentType, Filing, LegalEntity from entity_filer.filing_meta import FilingMeta @@ -29,15 +29,11 @@ def process( ): """Render the court order filing into the business model objects.""" court_order_filing.court_order_file_number = filing["courtOrder"].get("fileNumber") - court_order_filing.court_order_effect_of_order = filing["courtOrder"].get( - "effectOfOrder" - ) + 
court_order_filing.court_order_effect_of_order = filing["courtOrder"].get("effectOfOrder") court_order_filing.order_details = filing["courtOrder"].get("orderDetails") with suppress(IndexError, KeyError, TypeError, ValueError): - court_order_filing.court_order_date = datetime.fromisoformat( - filing["courtOrder"].get("orderDate") - ) + court_order_filing.court_order_date = datetime.fromisoformat(filing["courtOrder"].get("orderDate")) if file_key := filing["courtOrder"].get("fileKey"): document = Document() diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py index 0090687dd5..20d96cafde 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py @@ -19,34 +19,30 @@ import sentry_sdk # from entity_filer.exceptions import DefaultException, logger -from business_model import LegalEntity, Document, Filing +from business_model import Document, Filing, LegalEntity # from business_model.document import DocumentType from business_model.models.filing import DissolutionTypes -# from legal_api.services.minio import MinioService -# from legal_api.services.pdf_service import RegistrarStampData -from entity_filer.utils.legislation_datetime import LegislationDatetime -from entity_filer.exceptions import BusinessException, get_error_message, ErrorCode - +from entity_filer.exceptions import BusinessException, ErrorCode, get_error_message from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import create_office, filings from entity_filer.filing_processors.filing_components.parties import merge_all_parties +# from legal_api.services.minio import MinioService +# from legal_api.services.pdf_service import RegistrarStampData +from entity_filer.utils.legislation_datetime import LegislationDatetime + # from entity_filer.utils import replace_file_with_certified_copy -def process( - business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta -): +def process(business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): """Render the dissolution filing unto the model objects.""" if not (dissolution_filing := filing.get("dissolution")): print("Could not find Dissolution in: %s", filing) raise BusinessException( f"legal_filing:Dissolution missing from {filing}", - get_error_message( - ErrorCode.GENERAL_UNRECOVERABLE_ERROR, **{"filing_id": filing_rec.id} - ), + get_error_message(ErrorCode.GENERAL_UNRECOVERABLE_ERROR, **{"filing_id": filing_rec.id}), ) print("processing dissolution: %s", filing) @@ -66,11 +62,7 @@ def process( LegalEntity.EntityTypes.PARTNERSHIP.value, ): dissolution_date_str = dissolution_filing.get("dissolutionDate") - dissolution_date = ( - LegislationDatetime.as_utc_timezone_from_legislation_date_str( - dissolution_date_str - ) - ) + dissolution_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(dissolution_date_str) business.dissolution_date = dissolution_date business.state = LegalEntity.State.HISTORICAL @@ -105,15 +97,11 @@ def process( filing_meta.dissolution = { **filing_meta.dissolution, "dissolutionType": dissolution_type, - "dissolutionDate": LegislationDatetime.format_as_legislation_date( - business.dissolution_date - ), + "dissolutionDate": LegislationDatetime.format_as_legislation_date(business.dissolution_date), } -def _update_cooperative( - 
dissolution_filing: Dict, business: LegalEntity, filing: Filing, dissolution_type -): +def _update_cooperative(dissolution_filing: Dict, business: LegalEntity, filing: Filing, dissolution_type): """Update COOP data. This should not be updated for administrative dissolution @@ -138,9 +126,7 @@ def _update_cooperative( # business.documents.append(document) -def post_process( - business: LegalEntity, filing: Filing, correction: bool = False -): # pylint: disable=W0613 +def post_process(business: LegalEntity, filing: Filing, correction: bool = False): # pylint: disable=W0613 """Post processing activities for incorporations. THIS SHOULD NOT ALTER THE MODEL diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py index 49a96a78b5..88d651624d 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py @@ -17,13 +17,7 @@ from typing import Dict import pycountry -from business_model import Address -from business_model import EntityRole -from business_model import LegalEntity -from business_model import Office -from business_model import Party -from business_model import ShareClass -from business_model import ShareSeries +from business_model import Address, EntityRole, LegalEntity, Office, Party, ShareClass, ShareSeries from entity_filer.filing_processors.filing_components import ( aliases, @@ -34,7 +28,6 @@ shares, ) - JSON_ROLE_CONVERTER = { "custodian": EntityRole.RoleTypes.custodian.value, "completing party": EntityRole.RoleTypes.completing_party.value, @@ -58,9 +51,7 @@ def create_address(address_info: Dict, address_type: str) -> Address: street_additional=address_info.get("streetAddressAdditional"), city=address_info.get("addressCity"), region=address_info.get("addressRegion"), - country=pycountry.countries.search_fuzzy(address_info.get("addressCountry"))[ - 0 - ].alpha_2, + country=pycountry.countries.search_fuzzy(address_info.get("addressCountry"))[0].alpha_2, postal_code=address_info.get("postalCode"), delivery_instructions=address_info.get("deliveryInstructions"), address_type=db_address_type, @@ -74,9 +65,7 @@ def update_address(address: Address, new_info: dict) -> Address: address.street_additional = new_info.get("streetAddressAdditional") address.city = new_info.get("addressCity") address.region = new_info.get("addressRegion") - address.country = pycountry.countries.search_fuzzy(new_info.get("addressCountry"))[ - 0 - ].alpha_2 + address.country = pycountry.countries.search_fuzzy(new_info.get("addressCountry"))[0].alpha_2 address.postal_code = new_info.get("postalCode") address.delivery_instructions = new_info.get("deliveryInstructions") @@ -133,12 +122,9 @@ def merge_party(legal_entity_id: int, party_info: dict, create: bool = True) -> return party -def create_entity_party( - legal_entity_id: int, party_info: dict, create: bool = True -) -> LegalEntity: +def create_entity_party(legal_entity_id: int, party_info: dict, create: bool = True) -> LegalEntity: """Create a new party or get them if they already exist.""" - # HERE - legal_entity = None + if not (middle_initial := party_info["officer"].get("middleInitial")): middle_initial = party_info["officer"].get("middleName", "") @@ -202,23 +188,15 @@ def update_director(director: EntityRole, new_info: dict) -> EntityRole: director.party.title = 
new_info.get("title", "").upper() if director.party.delivery_address: - director.party.delivery_address = update_address( - director.party.delivery_address, new_info["deliveryAddress"] - ) + director.party.delivery_address = update_address(director.party.delivery_address, new_info["deliveryAddress"]) else: - director.party.delivery_address = create_address( - new_info["deliveryAddress"], Address.DELIVERY - ) + director.party.delivery_address = create_address(new_info["deliveryAddress"], Address.DELIVERY) if new_info.get("mailingAddress", None): if director.party.mailing_address is None: - director.party.mailing_address = create_address( - new_info["mailingAddress"], Address.MAILING - ) + director.party.mailing_address = create_address(new_info["mailingAddress"], Address.MAILING) else: - director.party.mailing_address = update_address( - director.party.mailing_address, new_info["mailingAddress"] - ) + director.party.mailing_address = update_address(director.party.mailing_address, new_info["mailingAddress"]) director.cessation_date = new_info.get("cessationDate") return director diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/aliases.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/aliases.py index d63514f957..cfcb2a4651 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/aliases.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/aliases.py @@ -14,8 +14,8 @@ """Manages the names of a Business.""" from typing import Dict -from flask_babel import _ as babel # noqa: N813 from business_model import Alias, LegalEntity +from flask_babel import _ as babel # noqa: N813 def update_aliases(business: LegalEntity, aliases) -> Dict: @@ -25,15 +25,11 @@ def update_aliases(business: LegalEntity, aliases) -> Dict: for alias in aliases: if (alias_id := alias.get("id")) and ( - existing_alias := next( - (x for x in business.aliases.all() if str(x.id) == alias_id), None - ) + existing_alias := next((x for x in business.aliases.all() if str(x.id) == alias_id), None) ): existing_alias.alias = alias["name"].upper() else: - new_alias = Alias( - alias=alias["name"].upper(), type=Alias.AliasType.TRANSLATION.value - ) + new_alias = Alias(alias=alias["name"].upper(), type=Alias.AliasType.TRANSLATION.value) business.aliases.append(new_alias) for current_alias in business.aliases.all(): diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/alternate_name.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/alternate_name.py index 737b36ed13..7fac8185dc 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/alternate_name.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/alternate_name.py @@ -15,13 +15,10 @@ from __future__ import annotations import datetime -import dpath from typing import Dict, List, Optional, Tuple -from business_model import db -from business_model import AlternateName -from business_model import Filing -from business_model import LegalEntity +import dpath +from business_model import AlternateName, Filing, LegalEntity from entity_filer import db from entity_filer.exceptions import DefaultException @@ -32,11 +29,11 @@ def update_partner_change( - legal_entity: LegalEntity, - filing_type: str, - change_filing_rec: Filing, - change_filing: Dict, - filing_meta: Dict, + legal_entity: 
LegalEntity, + filing_type: str, + change_filing_rec: Filing, + change_filing: Dict, + filing_meta: Dict, ): name_request = dpath.util.get(change_filing, f"/{filing_type}/nameRequest", default=None) if name_request and (to_legal_name := name_request.get("legalName")): @@ -50,9 +47,7 @@ def update_partner_change( alternate_name.change_filing_id = change_filing_rec.id if start := change_filing.get("filing", {}).get(f"{filing_type}", {}).get("startDate"): - start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str( - start - ) + start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(start) else: start_date = alternate_name.start_date # alternate_name.delete() @@ -61,7 +56,6 @@ def update_partner_change( db.session.delete(alternate_name) db.session.commit() - new_alternate_name = AlternateName( bn15=alternate_name.bn15, change_filing_id=change_filing_rec.id, @@ -81,11 +75,9 @@ def update_partner_change( } # Update Nature of LegalEntity - if ( - naics := change_filing.get(f"{filing_type}", {}) - .get("business", {}) - .get("naics") - ) and (naics_code := naics.get("naicsCode")): + if (naics := change_filing.get(f"{filing_type}", {}).get("business", {}).get("naics")) and ( + naics_code := naics.get("naicsCode") + ): if legal_entity.naics_code != naics_code: filing_meta = { **filing_meta, @@ -99,10 +91,10 @@ def update_partner_change( def update_proprietor_change( - filing_type: str, - change_filing_rec: Filing, - change_filing: Dict, - filing_meta: Dict, + filing_type: str, + change_filing_rec: Filing, + change_filing: Dict, + filing_meta: Dict, ): name_request = dpath.util.get(change_filing, f"/{filing_type}/nameRequest", default=None) identifier = dpath.util.get(change_filing_rec.filing_json, "filing/business/identifier") @@ -121,22 +113,14 @@ def update_proprietor_change( break if not proprietor_dict: - raise DefaultException( - f"No Proprietor in the SP {filing_type} for filing:{change_filing_rec.id}" - ) + raise DefaultException(f"No Proprietor in the SP {filing_type} for filing:{change_filing_rec.id}") - proprietor, delivery_address, mailing_address = get_or_create_party( - proprietor_dict, change_filing_rec - ) + proprietor, delivery_address, mailing_address = get_or_create_party(proprietor_dict, change_filing_rec) if not proprietor: - raise DefaultException( - f"No Proprietor in the SP {filing_type} for filing:{change_filing_rec.id}" - ) - + raise DefaultException(f"No Proprietor in the SP {filing_type} for filing:{change_filing_rec.id}") + if start := change_filing.get("filing", {}).get(f"{filing_type}", {}).get("startDate"): - start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str( - start - ) + start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(start) else: start_date = alternate_name.start_date diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py index 0ff87bdd6e..766ab608f1 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py @@ -17,8 +17,7 @@ from typing import Dict import dpath -from business_model import Address, LegalEntity, Filing, Party, PartyRole -from entity_filer.utils.legislation_datetime import LegislationDatetime +from business_model import Address, Filing, LegalEntity, Party, PartyRole 
from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import ( @@ -26,12 +25,14 @@ create_role, filings, legal_entity_info, + merge_party, resolutions, rules_and_memorandum, shares, update_address, ) from entity_filer.filing_processors.filing_components.parties import merge_all_parties +from entity_filer.utils.legislation_datetime import LegislationDatetime def correct_business_data( @@ -47,9 +48,7 @@ def correct_business_data( with suppress(IndexError, KeyError, TypeError): name_request_json = dpath.util.get(correction_filing, "/correction/nameRequest") from_legal_name = business.legal_name - legal_entity_info.set_legal_name( - business.identifier, business, name_request_json - ) + legal_entity_info.set_legal_name(business.identifier, business, name_request_json) if from_legal_name != business.legal_name: filing_meta.correction = { **filing_meta.correction, @@ -61,9 +60,7 @@ def correct_business_data( # Update cooperativeAssociationType if present with suppress(IndexError, KeyError, TypeError): - coop_association_type = dpath.util.get( - correction_filing, "/correction/cooperativeAssociationType" - ) + coop_association_type = dpath.util.get(correction_filing, "/correction/cooperativeAssociationType") from_association_type = business.association_type if coop_association_type: legal_entity_info.set_association_type(business, coop_association_type) @@ -76,11 +73,7 @@ def correct_business_data( } # Update Nature of Business - if ( - naics := correction_filing.get("correction", {}) - .get("business", {}) - .get("naics") - ): + if naics := correction_filing.get("correction", {}).get("business", {}).get("naics"): to_naics_code = naics.get("naicsCode") to_naics_description = naics.get("naicsDescription") if business.naics_description != to_naics_description: @@ -118,17 +111,11 @@ def correct_business_data( with suppress(IndexError, KeyError, TypeError): business_start_date = dpath.util.get(correction_filing, "/correction/startDate") if business_start_date: - business.start_date = ( - LegislationDatetime.as_utc_timezone_from_legislation_date_str( - business_start_date - ) - ) + business.start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(business_start_date) # update share structure and resolutions, if any with suppress(IndexError, KeyError, TypeError): - share_structure = dpath.util.get( - correction_filing, "/correction/shareStructure" - ) + share_structure = dpath.util.get(correction_filing, "/correction/shareStructure") shares.update_share_structure_correction(business, share_structure) # update resolution, if any @@ -151,9 +138,7 @@ def correct_business_data( # update business resolution date, if any is present with suppress(IndexError, KeyError, TypeError): - resolution_date = dpath.util.get( - correction_filing, "/correction/resolutionDate" - ) + resolution_date = dpath.util.get(correction_filing, "/correction/resolutionDate") resolutions.update_resolution_date(business, resolution_date) # update rules, if any @@ -161,9 +146,7 @@ def correct_business_data( rules_file_key = dpath.util.get(correction_filing, "/correction/rulesFileKey") rules_file_name = dpath.util.get(correction_filing, "/correction/rulesFileName") if rules_file_key: - rules_and_memorandum.update_rules( - business, correction_filing_rec, rules_file_key, rules_file_name - ) + rules_and_memorandum.update_rules(business, correction_filing_rec, rules_file_key, rules_file_name) filing_meta.correction = { **filing_meta.correction, **{"uploadNewRules": True}, @@ 
-174,20 +157,18 @@ def correct_business_data( memorandum_file_key = dpath.util.get(correction_filing, "/correction/memorandumFileKey") memorandum_file_name = dpath.util.get(correction_filing, "/correction/memorandumFileName") if memorandum_file_key: - rules_and_memorandum.update_memorandum(business, correction_filing_rec, - memorandum_file_key, memorandum_file_name) - filing_meta.correction = {**filing_meta.correction, - **{"uploadNewMemorandum": True}} + rules_and_memorandum.update_memorandum( + business, correction_filing_rec, memorandum_file_key, memorandum_file_name + ) + filing_meta.correction = {**filing_meta.correction, **{"uploadNewMemorandum": True}} with suppress(IndexError, KeyError, TypeError): if dpath.util.get(correction_filing, "/correction/memorandumInResolution"): - filing_meta.correction = {**filing_meta.correction, - **{"memorandumInResolution": True}} + filing_meta.correction = {**filing_meta.correction, **{"memorandumInResolution": True}} with suppress(IndexError, KeyError, TypeError): if dpath.util.get(correction_filing, "/correction/rulesInResolution"): - filing_meta.correction = {**filing_meta.correction, - **{"rulesInResolution": True}} + filing_meta.correction = {**filing_meta.correction, **{"rulesInResolution": True}} def update_parties(business: LegalEntity, parties: list, correction_filing_rec: Filing): @@ -195,9 +176,7 @@ def update_parties(business: LegalEntity, parties: list, correction_filing_rec: # Cease the party roles not present in the edit request end_date_time = datetime.datetime.utcnow() parties_to_update = [ - party.get("officer").get("id") - for party in parties - if party.get("officer").get("id") is not None + party.get("officer").get("id") for party in parties if party.get("officer").get("id") is not None ] existing_party_roles = PartyRole.get_party_roles(business.id, end_date_time.date()) for party_role in existing_party_roles: @@ -210,8 +189,7 @@ def update_parties(business: LegalEntity, parties: list, correction_filing_rec: # If id is present and is a GUID then this is an id specific to the UI which is not relevant to the backend. 
# The backend will have an id of type int if not party_info.get("officer").get("id") or ( - party_info.get("officer").get("id") - and not isinstance(party_info.get("officer").get("id"), int) + party_info.get("officer").get("id") and not isinstance(party_info.get("officer").get("id"), int) ): _create_party_info(business, correction_filing_rec, party_info) else: @@ -226,9 +204,7 @@ def _update_party(party_info): party.last_name = party_info["officer"].get("lastName", "").upper() party.middle_initial = party_info["officer"].get("middleName", "").upper() party.title = party_info.get("title", "").upper() - party.organization_name = ( - party_info["officer"].get("organizationName", "").upper() - ) + party.organization_name = party_info["officer"].get("organizationName", "").upper() party.party_type = party_info["officer"].get("partyType") party.email = party_info["officer"].get("email", "").lower() party.identifier = party_info["officer"].get("identifier", "").upper() @@ -240,7 +216,7 @@ def _update_party(party_info): def _create_party_info(business, correction_filing_rec, party_info): - party = merge_party(business_id=business.id, party_info=party_info, create=False) + party = merge_party(legal_entity_id=business.id, party_info=party_info, create=False) for role_type in party_info.get("roles"): role_str = role_type.get("roleType", "").lower() role = { diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/filings.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/filings.py index 5e5ced0662..93b41cd78b 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/filings.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/filings.py @@ -15,28 +15,23 @@ from contextlib import suppress from typing import Dict, Optional -from flask_babel import _ as babel # noqa: N813 from business_model import Filing +from flask_babel import _ as babel # noqa: N813 + from ...utils.datetime import datetime -def update_filing_court_order( - filing_submission: Filing, court_order_json: Dict -) -> Optional[Dict]: +def update_filing_court_order(filing_submission: Filing, court_order_json: Dict) -> Optional[Dict]: """Update the court_order info for a Filing.""" if not Filing: return {"error": babel("Filing required before alternate names can be set.")} filing_submission.court_order_file_number = court_order_json.get("fileNumber") - filing_submission.court_order_effect_of_order = court_order_json.get( - "effectOfOrder" - ) + filing_submission.court_order_effect_of_order = court_order_json.get("effectOfOrder") filing_submission.order_details = court_order_json.get("orderDetails") with suppress(IndexError, KeyError, TypeError, ValueError): - filing_submission.court_order_date = datetime.fromisoformat( - court_order_json.get("orderDate") - ) + filing_submission.court_order_date = datetime.fromisoformat(court_order_json.get("orderDate")) return None diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/legal_entity_info.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/legal_entity_info.py index c5caf4b5ae..924205ee18 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/legal_entity_info.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/legal_entity_info.py @@ -17,13 +17,9 @@ from typing import Dict import requests +from 
business_model import EntityRole, Filing, LegalEntity, LegalEntityIdentifier, LegalEntityType from flask import current_app from flask_babel import _ as babel # noqa: N813 -from business_model import LegalEntityIdentifier -from business_model import LegalEntityType -from business_model import LegalEntity -from business_model import Filing -from business_model import EntityRole # from legal_api.services import NaicsService @@ -70,15 +66,11 @@ def set_legal_name(corp_num: str, legal_entity: LegalEntity, legal_entity_info: legal_entity.legal_name = legal_name else: entity_type = legal_entity_info.get("legalType", None) - numbered_legal_name_suffix = LegalEntity.BUSINESSES[entity_type][ - "numberedBusinessNameSuffix" - ] + numbered_legal_name_suffix = LegalEntity.BUSINESSES[entity_type]["numberedBusinessNameSuffix"] legal_entity.legal_name = f"{corp_num[2:]} {numbered_legal_name_suffix}" -def update_legal_entity_info( - corp_num: str, legal_entity: LegalEntity, legal_entity_info: Dict, filing: Filing -): +def update_legal_entity_info(corp_num: str, legal_entity: LegalEntity, legal_entity_info: Dict, filing: Filing): """Format and update the legal_entity entity from incorporation filing.""" if corp_num and legal_entity and legal_entity_info and filing: set_legal_name(corp_num, legal_entity, legal_entity_info) @@ -96,8 +88,10 @@ def update_naics_info(legal_entity: LegalEntity, naics: Dict): # TODO update NAICS info legal_entity.naics_code = naics.get("naicsCode") if legal_entity.naics_code: - naics_structure = NaicsService.find_by_code(legal_entity.naics_code) - legal_entity.naics_key = naics_structure["naicsKey"] + # TODO: Uncomment next 2 lines when find_by_code implemented and delete "pass" + # naics_structure = NaicsService.find_by_code(legal_entity.naics_code) + # legal_entity.naics_key = naics_structure["naicsKey"] + pass else: legal_entity.naics_code = None legal_entity.naics_key = None diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/name_request.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/name_request.py index 1bc55c328f..43255a95d3 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/name_request.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/name_request.py @@ -17,22 +17,20 @@ import requests import sentry_sdk +from business_model import Filing, LegalEntity, RegistrationBootstrap # from entity_filer.exceptions import DefaultException from flask import current_app -from business_model import LegalEntity, Filing, RegistrationBootstrap # from legal_api.services.bootstrap import AccountService from entity_filer.services.utils import get_str -def consume_nr( - business: LegalEntity, filing: Filing, filing_type="incorporationApplication" -): +def consume_nr(business: LegalEntity, filing: Filing, filing_type="incorporationApplication"): """Update the nr to a consumed state.""" try: # skip this if none (nrNumber will not be available for numbered company) - if nr_num := get_str( + if nr_num := get_str( # noqa F841; remove this comment when below is done filing.filing_json, f"/filing/{filing_type}/nameRequest/nrNumber" ): pass @@ -59,12 +57,8 @@ def consume_nr( # AccountService.delete_affiliation(bootstrap.account, nr_num) except KeyError: pass # return - except ( - Exception - ): # pylint: disable=broad-except; note out any exception, but don't fail the call - sentry_sdk.print( - f"Queue Error: Consume NR error for 
filing:{filing.id}", level="error" - ) + except Exception: # pylint: disable=broad-except; note out any exception, but don't fail the call + sentry_sdk.print(f"Queue Error: Consume NR error for filing:{filing.id}", level="error") def set_legal_name(business: LegalEntity, name_request_info: dict): diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/parties.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/parties.py index b66af5bed4..e4d916c318 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/parties.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/parties.py @@ -17,20 +17,11 @@ import datetime from typing import Dict, List, Optional, Tuple -from business_model import Address -from business_model import ColinEntity -from business_model import Filing -from business_model import EntityRole -from business_model import LegalEntity +from business_model import Address, ColinEntity, EntityRole, Filing, LegalEntity from entity_filer import db -from entity_filer.exceptions import BusinessException -from entity_filer.exceptions import ErrorCode -from entity_filer.exceptions import get_error_message -from entity_filer.filing_processors.filing_components import create_address -from entity_filer.filing_processors.filing_components import create_role -from entity_filer.filing_processors.filing_components import merge_party -from entity_filer.filing_processors.filing_components import legal_entity_info +from entity_filer.exceptions import BusinessException, ErrorCode, get_error_message +from entity_filer.filing_processors.filing_components import create_address, create_role, legal_entity_info, merge_party def update_parties( @@ -66,9 +57,7 @@ def update_parties( try: for party_info in parties_structure: - party = merge_party( - legal_entity_id=legal_entity.id, party_info=party_info, create=False - ) + party = merge_party(legal_entity_id=legal_entity.id, party_info=party_info, create=False) for role_type in party_info.get("roles"): role_str = role_type.get("roleType", "").lower() role = { @@ -102,9 +91,7 @@ def delete_parties(legal_entity: LegalEntity): legal_entity.entity_roles.remove(role) -def merge_all_parties( - legal_entity: LegalEntity, filing: Filing, parties: dict -) -> [dict] | None: +def merge_all_parties(legal_entity: LegalEntity, filing: Filing, parties: dict) -> [dict] | None: """Merge all parties supplied and return a dict of errors, or None This top level method does 4 things: @@ -151,8 +138,7 @@ def merge_all_parties( (not party_identifier) and (party_id := party_dict.get("officer", {}).get("id")) and ( - (party_le := LegalEntity.find_by_id(party_id)) - or (party_le := ColinEntity.find_by_identifier(party_id)) + (party_le := LegalEntity.find_by_id(party_id)) or (party_le := ColinEntity.find_by_identifier(party_id)) ) ): existing_party = True @@ -176,9 +162,7 @@ def merge_all_parties( delivery_address = get_address_for_filing( party_le.entity_delivery_address, party_dict.get("deliveryAddress") ) - mailing_address = get_address_for_filing( - party_le.entity_mailing_address, party_dict.get("mailingAddress") - ) + mailing_address = get_address_for_filing(party_le.entity_mailing_address, party_dict.get("mailingAddress")) else: # New People and Orgs use the attached addresses if isinstance(party_le, ColinEntity): @@ -264,17 +248,11 @@ def merge_all_parties( ) case "Proprietor": - print(f"Proprietor role being skipped.") + 
print("Proprietor role being skipped.") case _: - errors.append( - { - "warning": f"role: {role_type} not assigned to party:{party_le}" - } - ) - print( - f"no matching roles for party: {party_type} and role: {role_type}" - ) + errors.append({"warning": f"role: {role_type} not assigned to party:{party_le}"}) + print(f"no matching roles for party: {party_type} and role: {role_type}") if memoize_existing_director_ids: delete_non_memoized_entity_role( @@ -282,9 +260,7 @@ def merge_all_parties( ) if memoize_existing_partners: - delete_non_memoized_entity_role( - legal_entity, filing, memoize_existing_partners, EntityRole.RoleTypes.partner - ) + delete_non_memoized_entity_role(legal_entity, filing, memoize_existing_partners, EntityRole.RoleTypes.partner) return errors if len(errors) > 0 else None @@ -364,9 +340,7 @@ def merge_entity_role_for_director( entity_role = EntityRole() # Blind updates - entity_role.appointment_date = ( - role_dict.get("appointmentDate") or filing.effective_date - ) + entity_role.appointment_date = role_dict.get("appointmentDate") or filing.effective_date entity_role.change_filing_id = filing.id # entity_role.delivery_address_id=delivery_address.id # entity_role.legal_entity_id=base_entity.id @@ -425,9 +399,7 @@ def create_entity_with_addresses(party_dict) -> LegalEntity: country="CA", postal_code=party_dict["mailingAddress"]["postalCode"], region=party_dict["mailingAddress"]["addressRegion"], - delivery_instructions=party_dict["mailingAddress"].get( - "deliveryInstructions", "" - ), + delivery_instructions=party_dict["mailingAddress"].get("deliveryInstructions", ""), ) # mailing_address.save() # new_party.mailing_address_id = mailing_address.id @@ -440,9 +412,7 @@ def create_entity_with_addresses(party_dict) -> LegalEntity: country="CA", postal_code=party_dict["deliveryAddress"]["postalCode"], region=party_dict["deliveryAddress"]["addressRegion"], - delivery_instructions=party_dict["deliveryAddress"].get( - "deliveryInstructions", "" - ), + delivery_instructions=party_dict["deliveryAddress"].get("deliveryInstructions", ""), ) # delivery_address.save() # new_party.delivery_address_id = delivery_address.id @@ -461,8 +431,7 @@ def get_address_for_filing(party_address: Address, address_dict: dict) -> Addres and party_address.country == address_dict["addressCountry"] and party_address.postal_code == address_dict["postalCode"] and party_address.region == address_dict["addressRegion"] - and party_address.delivery_instructions - == address_dict.get("deliveryInstructions", "") + and party_address.delivery_instructions == address_dict.get("deliveryInstructions", "") ): return party_address @@ -479,10 +448,7 @@ def get_address_for_filing(party_address: Address, address_dict: dict) -> Addres def delete_non_memoized_entity_role( - legal_entity: LegalEntity, - filing: Filing, - keep_list, - role: EntityRole.RoleTypes + legal_entity: LegalEntity, filing: Filing, keep_list, role: EntityRole.RoleTypes ) -> []: """Delete EntityRoles for role not in the keep_list.""" candidates = EntityRole.get_parties_by_role(legal_entity.id, role) @@ -511,10 +477,7 @@ def get_or_create_party(party_dict: dict, filing: Filing): ) or ( (not party_identifier) and (party_id := party_dict.get("officer", {}).get("id")) - and ( - (party_le := LegalEntity.find_by_id(party_id)) - or (party_le := ColinEntity.find_by_identifier(party_id)) - ) + and ((party_le := LegalEntity.find_by_id(party_id)) or (party_le := ColinEntity.find_by_identifier(party_id))) ): existing_party = True @@ -534,12 +497,8 @@ def 
get_or_create_party(party_dict: dict, filing: Filing): # for this set of roles if existing_party and party_type == "person": update_person_info(party_le, party_dict) - delivery_address = get_address_for_filing( - party_le.entity_delivery_address, party_dict.get("deliveryAddress") - ) - mailing_address = get_address_for_filing( - party_le.entity_mailing_address, party_dict.get("mailingAddress") - ) + delivery_address = get_address_for_filing(party_le.entity_delivery_address, party_dict.get("deliveryAddress")) + mailing_address = get_address_for_filing(party_le.entity_mailing_address, party_dict.get("mailingAddress")) else: # New People and Orgs use the attached addresses if isinstance(party_le, ColinEntity): @@ -556,9 +515,7 @@ def update_person_info(party_le, party_dict): party_le.first_name = party_dict["officer"].get("firstName", "").upper() party_le.last_name = party_dict["officer"].get("lastName", "").upper() party_le.middle_initial = ( - party_dict["officer"] - .get("middleInitial", party_dict["officer"].get("middleName", "")) - .upper() + party_dict["officer"].get("middleInitial", party_dict["officer"].get("middleName", "")).upper() ) party_le.email = party_dict["officer"].get("email") diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/resolutions.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/resolutions.py index 7f973bf023..0b987b4f7a 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/resolutions.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/resolutions.py @@ -14,8 +14,8 @@ """Manages the resolutions of a LegalEntity.""" from typing import Dict, Optional -from dateutil.parser import parse from business_model import LegalEntity, Party +from dateutil.parser import parse def find_resolution_with_largest_id(resolutions) -> Optional[Dict]: diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py index b188c62c58..c4431ed2f9 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py @@ -17,7 +17,8 @@ from tokenize import String from typing import List, Optional -from business_model import LegalEntity, Document, Filing +from business_model import Document, Filing, LegalEntity + # from legal_api.services.pdf_service import RegistrarStampData # from business_model.document import DocumentType @@ -72,7 +73,7 @@ def update_memorandum( # if nothing is passed in, we don't care and it's not an error return None - is_correction = filing.filing_type == "correction" + is_correction = filing.filing_type == "correction" # noqa F841; remove this comment when below is done # create certified copy for memorandum document # memorandum_file = MinioService.get_file(memorandum_file_key) # registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier, file_name, is_correction) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/shares.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/shares.py index 7a88c261fb..77b759df88 100644 --- 
a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/shares.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/shares.py @@ -16,13 +16,11 @@ from typing import Dict, List, Optional -from dateutil.parser import parse from business_model import LegalEntity, Resolution, ShareClass, ShareSeries +from dateutil.parser import parse -def update_share_structure( - business: LegalEntity, share_structure: Dict -) -> Optional[List]: +def update_share_structure(business: LegalEntity, share_structure: Dict) -> Optional[List]: """Manage the share structure for a business. Assumption: The structure has already been validated, upon submission. @@ -79,9 +77,7 @@ def update_share_structure( return err -def update_share_structure_correction( - business: LegalEntity, share_structure: Dict -) -> Optional[List]: +def update_share_structure_correction(business: LegalEntity, share_structure: Dict) -> Optional[List]: """Manage the share structure for a business. Assumption: The structure has already been validated, upon submission. @@ -100,23 +96,17 @@ def update_share_structure_correction( if resolution_dates := share_structure.get("resolutionDates"): # Two lists of dates in datetime format business_dates = [item.resolution_date for item in business.resolutions] - parsed_dates = [ - parse(resolution_dt).date() for resolution_dt in resolution_dates - ] + parsed_dates = [parse(resolution_dt).date() for resolution_dt in resolution_dates] # Dates in both db and json inclusion_entries = [ - business.resolutions[index] - for index, date in enumerate(business_dates) - if date in parsed_dates + business.resolutions[index] for index, date in enumerate(business_dates) if date in parsed_dates ] if len(inclusion_entries) > 0: business.resolutions = inclusion_entries # Dates in json and not in db - exclusion_entries = [ - date for date in parsed_dates if date not in business_dates - ] + exclusion_entries = [date for date in parsed_dates if date not in business_dates] resolution_dates = exclusion_entries @@ -190,9 +180,7 @@ def create_share_class(share_class_info: dict) -> ShareClass: return share_class -def update_business_share_class( - share_classes: list, business: LegalEntity, exclusion_entries: list -): +def update_business_share_class(share_classes: list, business: LegalEntity, exclusion_entries: list): """Update existing ones in both db if they are present in json.""" share_class_db_ids = [item.id for item in business.share_classes] diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py index f88d626ae7..25c115e488 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py @@ -18,24 +18,19 @@ from typing import Dict import sentry_sdk - -# from entity_filer.exceptions import DefaultException -from business_model import LegalEntity, Document, Filing, RegistrationBootstrap +from business_model import Document, Filing, LegalEntity, RegistrationBootstrap from business_model.models.document import DocumentType +from entity_filer.exceptions import DefaultException +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components import aliases, filings, legal_entity_info, shares +from entity_filer.filing_processors.filing_components.offices import 
update_offices +from entity_filer.filing_processors.filing_components.parties import merge_all_parties + # from legal_api.services.bootstrap import AccountService # from legal_api.services.minio import MinioService # from legal_api.services.pdf_service import RegistrarStampData -from entity_filer.filing_meta import FilingMeta -from entity_filer.filing_processors.filing_components import ( - aliases, - filings, - legal_entity_info, - shares, -) -from entity_filer.filing_processors.filing_components.offices import update_offices -from entity_filer.filing_processors.filing_components.parties import merge_all_parties # from entity_filer.utils import replace_file_with_certified_copy @@ -80,7 +75,7 @@ def update_affiliation(business: LegalEntity, filing: Filing): def _update_cooperative(incorp_filing: Dict, business: LegalEntity, filing: Filing): - cooperative_obj = incorp_filing.get("cooperative", None) + cooperative_obj = incorp_filing.get("cooperative", None) # noqa F841; remove this comment when below is done # TODO remove all this # if cooperative_obj: # # create certified copy for rules document @@ -104,7 +99,6 @@ def _update_cooperative(incorp_filing: Dict, business: LegalEntity, filing: Fili # registrar_stamp_data = RegistrarStampData(business.founding_date, business.identifier) # replace_file_with_certified_copy(memorandum_file.data, memorandum_file_key, registrar_stamp_data) - # document = Document() # document.type = DocumentType.COOP_MEMORANDUM.value # document.file_key = memorandum_file_key @@ -127,13 +121,9 @@ def process( filing_meta.incorporation_application = {} if not incorp_filing: - raise DefaultException( - f"IA legal_filing:incorporationApplication missing from {filing_rec.id}" - ) + raise DefaultException(f"IA legal_filing:incorporationApplication missing from {filing_rec.id}") if business: - raise DefaultException( - f"Business Already Exist: IA legal_filing:incorporationApplication {filing_rec.id}" - ) + raise DefaultException(f"Business Already Exist: IA legal_filing:incorporationApplication {filing_rec.id}") business_info_obj = incorp_filing.get("nameRequest") @@ -149,9 +139,7 @@ def process( # Initial insert of the business record business = LegalEntity() - business = legal_entity_info.update_legal_entity_info( - corp_num, business, business_info_obj, filing_rec - ) + business = legal_entity_info.update_legal_entity_info(corp_num, business, business_info_obj, filing_rec) business = _update_cooperative(incorp_filing, business, filing_rec) business.state = LegalEntity.State.ACTIVE @@ -165,9 +153,7 @@ def process( } if not business: - raise DefaultException( - f"IA incorporationApplication {filing_rec.id}, Unable to create business." 
- ) + raise DefaultException(f"IA incorporationApplication {filing_rec.id}, Unable to create business.") if offices := incorp_filing["offices"]: update_offices(business, offices) @@ -191,12 +177,8 @@ def process( ia_json["filing"]["business"] = {} ia_json["filing"]["business"]["identifier"] = business.identifier ia_json["filing"]["business"]["legalType"] = business.entity_type - ia_json["filing"]["business"][ - "foundingDate" - ] = business.founding_date.isoformat() - filing_rec._filing_json = ( - ia_json # pylint: disable=protected-access; bypass to update filing data - ) + ia_json["filing"]["business"]["foundingDate"] = business.founding_date.isoformat() + filing_rec._filing_json = ia_json # pylint: disable=protected-access; bypass to update filing data return business, filing_rec, filing_meta diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_on.py b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_on.py index b8d3f24461..11d923d50d 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_on.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_on.py @@ -17,17 +17,14 @@ from typing import Dict import dpath +from business_model import Filing, LegalEntity -# from entity_filer.exceptions import DefaultException, logger -from business_model import LegalEntity, Filing - +from entity_filer.exceptions import DefaultException, logger from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import filings -def process( - business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta -): +def process(business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): """Render the put back on filing unto the model objects.""" if not (put_back_on_filing := filing.get("putBackOn")): print("Could not find putBackOn in: %s", filing) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_notation.py b/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_notation.py index 952ed56794..29742116ea 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_notation.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_notation.py @@ -23,15 +23,9 @@ def process(registrars_notation_filing: Filing, filing: Dict, filing_meta: FilingMeta): """Render the registrars notation filing into the business model objects.""" - registrars_notation_filing.court_order_file_number = filing[ - "registrarsNotation" - ].get("fileNumber") - registrars_notation_filing.court_order_effect_of_order = filing[ - "registrarsNotation" - ].get("effectOfOrder") - registrars_notation_filing.order_details = filing["registrarsNotation"][ - "orderDetails" - ] + registrars_notation_filing.court_order_file_number = filing["registrarsNotation"].get("fileNumber") + registrars_notation_filing.court_order_effect_of_order = filing["registrarsNotation"].get("effectOfOrder") + registrars_notation_filing.order_details = filing["registrarsNotation"]["orderDetails"] with suppress(IndexError, KeyError, TypeError, ValueError): registrars_notation_filing.court_order_date = datetime.fromisoformat( diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_order.py b/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_order.py index ed13eacbe1..db7a65b61a 100644 --- 
a/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_order.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/registrars_order.py @@ -23,15 +23,9 @@ def process(registrars_order_filing: Filing, filing: Dict, filing_meta: FilingMeta): """Render the registrars order filing into the business model objects.""" - registrars_order_filing.court_order_file_number = filing["registrarsOrder"].get( - "fileNumber" - ) - registrars_order_filing.court_order_effect_of_order = filing["registrarsOrder"].get( - "effectOfOrder" - ) + registrars_order_filing.court_order_file_number = filing["registrarsOrder"].get("fileNumber") + registrars_order_filing.court_order_effect_of_order = filing["registrarsOrder"].get("effectOfOrder") registrars_order_filing.order_details = filing["registrarsOrder"]["orderDetails"] with suppress(IndexError, KeyError, TypeError, ValueError): - registrars_order_filing.court_order_date = datetime.fromisoformat( - filing["registrarsOrder"].get("orderDate") - ) + registrars_order_filing.court_order_date = datetime.fromisoformat(filing["registrarsOrder"].get("orderDate")) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/registration.py b/queue_services/entity-filer/src/entity_filer/filing_processors/registration.py index 96796aceee..3951f081f2 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/registration.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/registration.py @@ -19,37 +19,34 @@ import dpath import sentry_sdk +from business_model import AlternateName, Filing, LegalEntity, RegistrationBootstrap # from entity_filer.exceptions import DefaultException from entity_filer.exceptions import DefaultException -from business_model import LegalEntity, Filing, RegistrationBootstrap, AlternateName - -# from legal_api.services.bootstrap import AccountService -from entity_filer.utils.legislation_datetime import LegislationDatetime - from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import filings, legal_entity_info +from entity_filer.filing_processors.filing_components.alternate_name import get_partnership_name from entity_filer.filing_processors.filing_components.offices import update_offices -from entity_filer.filing_processors.filing_components.parties import merge_all_parties from entity_filer.filing_processors.filing_components.parties import ( create_entity_with_addresses, + get_or_create_party, + merge_all_parties, ) -from entity_filer.filing_processors.filing_components.parties import get_or_create_party -from entity_filer.filing_processors.filing_components.alternate_name import get_partnership_name + +# from legal_api.services.bootstrap import AccountService +from entity_filer.utils.legislation_datetime import LegislationDatetime + def update_affiliation(business: LegalEntity, filing: Filing): """Create an affiliation for the business and remove the bootstrap.""" try: bootstrap = RegistrationBootstrap.find_by_identifier(filing.temp_reg) - pass_code = legal_entity_info.get_firm_affiliation_passcode(business.id) - - nr_number = ( - filing.filing_json.get("filing") - .get("registration", {}) - .get("nameRequest", {}) - .get("nrNumber") + pass_code = legal_entity_info.get_firm_affiliation_passcode( # noqa F841; remove this when below is done + business.id ) - details = { + + nr_number = filing.filing_json.get("filing").get("registration", {}).get("nameRequest", {}).get("nrNumber") + details = { # noqa F841; remove this 
comment when below is done "bootstrapIdentifier": bootstrap.identifier, "identifier": business.identifier, "nrNumber": nr_number, @@ -83,9 +80,7 @@ def update_affiliation(business: LegalEntity, filing: Filing): # or ('deaffiliation' in locals() and deaffiliation != HTTPStatus.OK)\ # or ('bootstrap_update' in locals() and bootstrap_update != HTTPStatus.OK): # raise DefaultException - except ( - Exception - ) as err: # pylint: disable=broad-except; note out any exception, but don't fail the call + except Exception as err: # pylint: disable=broad-except; note out any exception, but don't fail the call sentry_sdk.print( f"Queue Error: Affiliation error for filing:{filing.id}, with err:{err}", level="error", @@ -101,25 +96,17 @@ def process( """Process the incoming registration filing.""" # Extract the filing information for registration if business: - raise DefaultException( - f"Business Already Exist: Registration legal_filing:registration {filing_rec.id}" - ) + raise DefaultException(f"Business Already Exist: Registration legal_filing:registration {filing_rec.id}") if not (registration_filing := filing.get("filing", {}).get("registration")): - raise DefaultException( - f"Registration legal_filing:registration missing from {filing_rec.id}" - ) + raise DefaultException(f"Registration legal_filing:registration missing from {filing_rec.id}") - legal_type = registration_filing.get("businessType") or registration_filing.get( - "nameRequest", {} - ).get("legalType") + legal_type = registration_filing.get("businessType") or registration_filing.get("nameRequest", {}).get("legalType") if legal_type not in ( LegalEntity.EntityTypes.SOLE_PROP, LegalEntity.EntityTypes.PARTNERSHIP, ): - raise DefaultException( - f"{filing_rec.id} has no valid legatype for a Registration." - ) + raise DefaultException(f"{filing_rec.id} has no valid legal type for a Registration.") filing_meta.registration = {} @@ -127,9 +114,7 @@ def process( # Reserve the Corp Number for this entity if not (firm_reg_num := legal_entity_info.get_next_corp_num("FM")): - raise DefaultException( - f"registration {filing_rec.id} unable to get a Firm registration number." - ) + raise DefaultException(f"registration {filing_rec.id} unable to get a Firm registration number.") match legal_type: case LegalEntity.EntityTypes.SOLE_PROP: @@ -138,16 +123,12 @@ def process( case LegalEntity.EntityTypes.PARTNERSHIP: # Create Partnership - business = merge_partnership_registration( - firm_reg_num, filing, filing_rec, registration_filing - ) + business = merge_partnership_registration(firm_reg_num, filing, filing_rec, registration_filing) case _: # Default and failed # Based on the above checks, this should never happen - raise DefaultException( - f"registration {filing_rec.id} had no valid Firm type." 
-        )
+        raise DefaultException(f"registration {filing_rec.id} had no valid Firm type.")
 
     # Assuming we should not reset this from a filing
     if not business.tax_id:
@@ -180,9 +161,7 @@ def process(
     registration_json = copy.deepcopy(filing_rec.filing_json)
     registration_json["filing"]["business"] = {}
     registration_json["filing"]["business"]["identifier"] = business.identifier
-    registration_json["filing"]["registration"]["business"][
-        "identifier"
-    ] = business.identifier
+    registration_json["filing"]["registration"]["business"]["identifier"] = business.identifier
     registration_json["filing"]["business"]["legalType"] = business.entity_type
     # registration_json['filing']['business']['foundingDate'] = business.founding_date.isoformat()
     filing_rec._filing_json = registration_json  # pylint: disable=protected-access; bypass to update filing data
@@ -207,9 +186,7 @@ def merge_partnership_registration(
     # Initial insert of the business record
     business_info_obj = registration_filing.get("nameRequest")
     business = LegalEntity()
-    business = legal_entity_info.update_legal_entity_info(
-        registration_num, business, business_info_obj, filing_rec
-    )
+    business = legal_entity_info.update_legal_entity_info(registration_num, business, business_info_obj, filing_rec)
     business.start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(
         registration_filing.get("startDate")
     )
@@ -234,15 +211,11 @@ def merge_partnership_registration(
     return business
 
 
-def merge_sp_registration(
-    registration_num: str, filing: Dict, filing_rec: Filing
-) -> LegalEntity:
+def merge_sp_registration(registration_num: str, filing: Dict, filing_rec: Filing) -> LegalEntity:
     # find or create the LE for the SP Owner
     if not (parties_dict := filing["filing"]["registration"]["parties"]):
-        raise DefaultException(
-            f"Missing parties in the SP registration for filing:{filing_rec.id}"
-        )
+        raise DefaultException(f"Missing parties in the SP registration for filing:{filing_rec.id}")
 
     # Find the Proprietor
     proprietor = None
@@ -255,28 +228,15 @@ def merge_sp_registration(
             break
 
     if not proprietor_dict:
-        raise DefaultException(
-            f"No Proprietor in the SP registration for filing:{filing_rec.id}"
-        )
+        raise DefaultException(f"No Proprietor in the SP registration for filing:{filing_rec.id}")
 
-    proprietor, delivery_address, mailing_address = get_or_create_party(
-        proprietor_dict, filing_rec
-    )
+    proprietor, delivery_address, mailing_address = get_or_create_party(proprietor_dict, filing_rec)
     if not proprietor:
-        raise DefaultException(
-            f"No Proprietor in the SP registration for filing:{filing_rec.id}"
-        )
+        raise DefaultException(f"No Proprietor in the SP registration for filing:{filing_rec.id}")
 
-    operating_name = (
-        filing.get("filing", {})
-        .get("registration", {})
-        .get("nameRequest", {})
-        .get("legalName")
-    )
+    operating_name = filing.get("filing", {}).get("registration", {}).get("nameRequest", {}).get("legalName")
     if start := filing.get("filing", {}).get("registration", {}).get("startDate"):
-        start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(
-            start
-        )
+        start_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(start)
     elif filing.effective_date:
         start_date = filing.effective_date.isoformat()
     else:
diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/restoration.py b/queue_services/entity-filer/src/entity_filer/filing_processors/restoration.py
index 44879f2d4d..ed6d84070a 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/restoration.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/restoration.py
@@ -18,24 +18,18 @@
 import dpath
 import sentry_sdk
 
-from business_model import LegalEntity, Filing, EntityRole
-from entity_filer.utils.datetime import datetime
-from entity_filer.utils.legislation_datetime import LegislationDatetime
+from business_model import EntityRole, Filing, LegalEntity
 
 from entity_filer.filing_meta import FilingMeta
-from entity_filer.filing_processors.filing_components import (
-    filings,
-    legal_entity_info,
-    name_request,
-)
+from entity_filer.filing_processors.filing_components import filings, legal_entity_info, name_request
 from entity_filer.filing_processors.filing_components.aliases import update_aliases
 from entity_filer.filing_processors.filing_components.offices import update_offices
 from entity_filer.filing_processors.filing_components.parties import merge_all_parties
+from entity_filer.utils.datetime import datetime
+from entity_filer.utils.legislation_datetime import LegislationDatetime
 
 
-def process(
-    business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta
-):
+def process(business: LegalEntity, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta):
     """Process restoration filing."""
     restoration_filing = filing.get("restoration")
     filing_meta.restoration = {}
@@ -43,9 +37,7 @@ def process(
     from_legal_name = business.legal_name
 
     if name_request_json := restoration_filing.get("nameRequest"):
-        legal_entity_info.set_legal_name(
-            business.identifier, business, name_request_json
-        )
+        legal_entity_info.set_legal_name(business.identifier, business, name_request_json)
         if nr_number := name_request_json.get("nrNumber", None):
             filing_meta.restoration = {**filing_meta.restoration, "nrNumber": nr_number}
 
@@ -57,12 +49,8 @@ def process(
         # adding this intentionally for now to refer in ledger (filing-ui)
     }
 
-    if expiry := restoration_filing.get(
-        "expiry"
-    ):  # limitedRestoration, limitedRestorationExtension
-        business.restoration_expiry_date = (
-            LegislationDatetime.as_utc_timezone_from_legislation_date_str(expiry)
-        )
+    if expiry := restoration_filing.get("expiry"):  # limitedRestoration, limitedRestorationExtension
+        business.restoration_expiry_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(expiry)
         filing_meta.restoration = {**filing_meta.restoration, "expiry": expiry}
     else:  # fullRestoration, limitedRestorationToFull
         business.restoration_expiry_date = None
@@ -88,16 +76,10 @@ def process(
     application_date = restoration_filing.get("applicationDate")
     notice_date = restoration_filing.get("noticeDate")
     if application_date and notice_date:
-        filing_rec.application_date = (
-            LegislationDatetime.as_utc_timezone_from_legislation_date_str(
-                application_date
-            )
-        )
-        filing_rec.notice_date = (
-            LegislationDatetime.as_utc_timezone_from_legislation_date_str(
-                notice_date
-            )
+        filing_rec.application_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(
+            application_date
         )
+        filing_rec.notice_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(notice_date)
 
 
 def _update_parties(business: LegalEntity, parties: dict, filing_rec: Filing):
diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/special_resolution.py b/queue_services/entity-filer/src/entity_filer/filing_processors/special_resolution.py
index 9043f9dc0e..7c54e23f76 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/special_resolution.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/special_resolution.py
@@ -12,10 +12,9 @@
 """File processing rules and actions for Special Resolution filings."""
 from typing import Dict
 
-from dateutil.parser import parse
-
 # from entity_queue_common.service_utils import logger
-from business_model import LegalEntity, Filing, Resolution
+from business_model import Filing, LegalEntity, Resolution
+from dateutil.parser import parse
 
 
 def process(business: LegalEntity, filing: Dict, filing_rec: Filing):
@@ -30,21 +29,15 @@ def process(business: LegalEntity, filing: Dict, filing_rec: Filing):
 
     if signatory := resolution_filing.get("signatory"):
         signatory_le = LegalEntity(
-            first_name=signatory.get("givenName").upper()
-            if signatory.get("givenName")
-            else None,
+            first_name=signatory.get("givenName").upper() if signatory.get("givenName") else None,
             last_name=signatory.get("familyName", "").upper(),
-            middle_initial=signatory.get("additionalName").upper()
-            if signatory.get("additionalName")
-            else None,
+            middle_initial=signatory.get("additionalName").upper() if signatory.get("additionalName") else None,
             entity_type=LegalEntity.EntityTypes.PERSON,
         )
         resolution.signing_legal_entity = signatory_le
 
     if resolution_filing.get("resolutionDate"):
-        resolution.resolution_date = parse(
-            resolution_filing.get("resolutionDate")
-        ).date()
+        resolution.resolution_date = parse(resolution_filing.get("resolutionDate")).date()
 
     if resolution_filing.get("signingDate"):
         resolution.signing_date = parse(resolution_filing.get("signingDate")).date()
diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/transition.py b/queue_services/entity-filer/src/entity_filer/filing_processors/transition.py
index 5793477ffd..7ba528624b 100644
--- a/queue_services/entity-filer/src/entity_filer/filing_processors/transition.py
+++ b/queue_services/entity-filer/src/entity_filer/filing_processors/transition.py
@@ -14,31 +14,23 @@
 """File processing rules and actions for the transition of a business."""
 from typing import Dict
 
-# from entity_filer.exceptions import DefaultException
-from business_model import LegalEntity, Filing
+from business_model import Filing, LegalEntity
+from entity_filer.exceptions import DefaultException
 from entity_filer.filing_meta import FilingMeta
 from entity_filer.filing_processors.filing_components import aliases, shares
 from entity_filer.filing_processors.filing_components.offices import update_offices
 from entity_filer.filing_processors.filing_components.parties import merge_all_parties
 
 
-def process(
-    business: LegalEntity, filing_rec: Filing, filing: Dict, filing_meta: FilingMeta
-):
+def process(business: LegalEntity, filing_rec: Filing, filing: Dict, filing_meta: FilingMeta):  # pylint: disable=too-many-locals; 1 extra
     """Process the incoming transition filing."""
     # Extract the filing information for transition application
-    if not (
-        transition_filing := filing.get("transition")
-    ):  # pylint: disable=superfluous-parens;
-        raise DefaultException(
-            f"legal_filing:transition data missing from {filing_rec.id}"
-        )
+    if not (transition_filing := filing.get("transition")):  # pylint: disable=superfluous-parens;
+        raise DefaultException(f"legal_filing:transition data missing from {filing_rec.id}")
 
     if not business:
-        raise DefaultException(
-            f"Business does not exist: legal_filing:transitionApplication {filing_rec.id}"
-        )
+        raise DefaultException(f"Business does not exist: 
legal_filing:transitionApplication {filing_rec.id}") # Initial insert of the business record business.restriction_ind = transition_filing.get("hasProvisions") diff --git a/queue_services/entity-filer/src/entity_filer/resources/worker.py b/queue_services/entity-filer/src/entity_filer/resources/worker.py index 851d0a10a8..a357d3bcb8 100644 --- a/queue_services/entity-filer/src/entity_filer/resources/worker.py +++ b/queue_services/entity-filer/src/entity_filer/resources/worker.py @@ -40,29 +40,17 @@ from contextlib import suppress from dataclasses import dataclass from http import HTTPStatus -from typing import Dict -from typing import Optional - -from flask import Blueprint -from flask import request - -from entity_filer import db +from typing import Dict, Optional # from legal_api.core import Filing as FilingCore -from business_model import LegalEntity, Filing - -# from legal_api.services.bootstrap import AccountService -from entity_filer.utils.datetime import datetime -from sqlalchemy.exc import OperationalError +from business_model import Filing, LegalEntity +from flask import Blueprint, request from simple_cloudevent import SimpleCloudEvent -from werkzeug.exceptions import UnsupportedMediaType -from werkzeug.exceptions import BadRequest +from sqlalchemy.exc import OperationalError +from werkzeug.exceptions import BadRequest, UnsupportedMediaType -from entity_filer.services import queue -from entity_filer.services.logging import structured_log -from entity_filer.exceptions import BusinessException -from entity_filer.exceptions import DefaultException -from entity_filer import config +from entity_filer import config, db +from entity_filer.exceptions import BusinessException, DefaultException from entity_filer.filing_meta import FilingMeta, json_serial from entity_filer.filing_processors import ( admin_freeze, @@ -91,7 +79,11 @@ transition, ) from entity_filer.filing_processors.filing_components import name_request +from entity_filer.services import queue +from entity_filer.services.logging import structured_log +# from legal_api.services.bootstrap import AccountService +from entity_filer.utils.datetime import datetime bp = Blueprint("worker", __name__) @@ -121,10 +113,8 @@ def worker(): # ## try: process_filing(filing_message) - except (AttributeError, BusinessException, DefaultException) as err: - return { - "error": f"Unable to process filing: {filing_message}" - }, HTTPStatus.BAD_REQUEST + except (AttributeError, BusinessException, DefaultException) as err: # noqa F841 + return {"error": f"Unable to process filing: {filing_message}"}, HTTPStatus.BAD_REQUEST structured_log(request, "INFO", f"completed ce: {str(ce)}") return {}, HTTPStatus.OK @@ -150,6 +140,7 @@ def get_filing_message(ce: SimpleCloudEvent): return fm return None + def dict_keys_to_snake_case(d: dict): """Convert the keys of a dict to snake_case""" pattern = re.compile(r"(? Optional[dict]: with suppress(Exception): - if (envelope := request.get_json()) and GcpQueue.is_valid_envelope( - envelope - ): + if (envelope := request.get_json()) and GcpQueue.is_valid_envelope(envelope): return envelope return None @staticmethod - def get_simple_cloud_event( - request: LocalProxy, return_raw: bool = False - ) -> type[SimpleCloudEvent | dict | None]: + def get_simple_cloud_event(request: LocalProxy, return_raw: bool = False) -> type[SimpleCloudEvent | dict | None]: """Return a SimpleCloudEvent if one is in session from the PubSub call. 
        Parameters
@@ -176,9 +164,7 @@ def publish(self, topic: str, payload: bytes):
             return future.result()
 
         except (CancelledError, TimeoutError) as error:
-            raise Exception(
-                "Unable to post to queue", error
-            ) from error  # pylint: disable=W0719
+            raise Exception("Unable to post to queue", error) from error  # pylint: disable=W0719
 
     @staticmethod
     def to_queue_message(ce: SimpleCloudEvent):
diff --git a/queue_services/entity-filer/src/entity_filer/services/logging.py b/queue_services/entity-filer/src/entity_filer/services/logging.py
index 8fb46cbb79..cd8dfd3e73 100644
--- a/queue_services/entity-filer/src/entity_filer/services/logging.py
+++ b/queue_services/entity-filer/src/entity_filer/services/logging.py
@@ -40,6 +40,7 @@
 
 
 def structured_log(request: LocalProxy, severity: str = "NOTICE", message: str = None):
+    """Print a structured log message."""
     frm = inspect.stack()[1]
     mod = inspect.getmodule(frm[0])
 
@@ -52,9 +53,7 @@ def structured_log(request: LocalProxy, severity: str = "NOTICE", message: str =
 
     if trace_header and PROJECT:
         trace = trace_header.split("/")
-        global_log_fields[
-            "logging.googleapis.com/trace"
-        ] = f"projects/{PROJECT}/traces/{trace[0]}"
+        global_log_fields["logging.googleapis.com/trace"] = f"projects/{PROJECT}/traces/{trace[0]}"
 
     # Complete a structured log entry.
     entry = dict(
diff --git a/queue_services/entity-filer/src/entity_filer/translations/__init__.py b/queue_services/entity-filer/src/entity_filer/translations/__init__.py
index b403a93dc4..97012b4925 100644
--- a/queue_services/entity-filer/src/entity_filer/translations/__init__.py
+++ b/queue_services/entity-filer/src/entity_filer/translations/__init__.py
@@ -14,7 +14,4 @@
 """Translations for the API messages, not for the content returned from the datastore or entered by users."""
 from flask_babel import Babel
 
-
-babel = (
-    Babel()
-)  # pylint: disable=invalid-name; by convention our external services are lower case
+babel = Babel()  # pylint: disable=invalid-name; by convention our external services are lower case
diff --git a/queue_services/entity-filer/src/entity_filer/utils/datetime.py b/queue_services/entity-filer/src/entity_filer/utils/datetime.py
index 39437f44d2..b66a2f1fef 100644
--- a/queue_services/entity-filer/src/entity_filer/utils/datetime.py
+++ b/queue_services/entity-filer/src/entity_filer/utils/datetime.py
@@ -14,18 +14,14 @@
 """Date time utilities."""
 # from datetime import datetime, timezone
 import time as _time
-from datetime import (
-    date,
-    datetime as _datetime,
-    timezone,
-)  # pylint: disable=unused-import # noqa: F401, I001, I005
+from datetime import date
+from datetime import datetime as _datetime  # pylint: disable=unused-import # noqa: F401, I001, I005
+from datetime import timezone  # noqa: I003,I005
 
 
-class datetime(
-    _datetime
-):  # pylint: disable=invalid-name; # noqa: N801; ha datetime is invalid??
+class datetime(_datetime):  # pylint: disable=invalid-name; # noqa: N801; ha datetime is invalid??
"""Alternative to the built-in datetime that has a timezone on the UTC call.""" @classmethod diff --git a/queue_services/entity-filer/src/entity_filer/utils/legislation_datetime.py b/queue_services/entity-filer/src/entity_filer/utils/legislation_datetime.py index aba1167378..0151f438a6 100644 --- a/queue_services/entity-filer/src/entity_filer/utils/legislation_datetime.py +++ b/queue_services/entity-filer/src/entity_filer/utils/legislation_datetime.py @@ -26,16 +26,12 @@ class LegislationDatetime: @staticmethod def now() -> datetime: """Construct a datetime using the legislation timezone.""" - return datetime.now().astimezone( - pytz.timezone(current_app.config.get("LEGISLATIVE_TIMEZONE")) - ) + return datetime.now().astimezone(pytz.timezone(current_app.config.get("LEGISLATIVE_TIMEZONE"))) @staticmethod def tomorrow_midnight() -> datetime: """Construct a datetime tomorrow midnight using the legislation timezone.""" - _date = datetime.now().astimezone( - pytz.timezone(current_app.config.get("LEGISLATIVE_TIMEZONE")) - ) + _date = datetime.now().astimezone(pytz.timezone(current_app.config.get("LEGISLATIVE_TIMEZONE"))) _date += datedelta.datedelta(days=1) _date = _date.replace(hour=0, minute=0, second=0, microsecond=0) @@ -44,9 +40,7 @@ def tomorrow_midnight() -> datetime: @staticmethod def as_legislation_timezone(date_time: datetime) -> datetime: """Return a datetime adjusted to the legislation timezone.""" - return date_time.astimezone( - pytz.timezone(current_app.config.get("LEGISLATIVE_TIMEZONE")) - ) + return date_time.astimezone(pytz.timezone(current_app.config.get("LEGISLATIVE_TIMEZONE"))) @staticmethod def as_legislation_timezone_from_date(_date: date) -> datetime: @@ -77,9 +71,7 @@ def as_utc_timezone(date_time: datetime) -> datetime: @staticmethod def as_utc_timezone_from_legislation_date_str(date_string: str) -> datetime: """Return a datetime adjusted to the GMT timezone (aka UTC) from a date (1900-12-31) string.""" - _date_time = LegislationDatetime.as_legislation_timezone_from_date_str( - date_string - ) + _date_time = LegislationDatetime.as_legislation_timezone_from_date_str(date_string) return LegislationDatetime.as_utc_timezone(_date_time) @staticmethod @@ -90,9 +82,7 @@ def format_as_report_string(date_time: datetime) -> str: hour = date_time.strftime("%I").lstrip("0") # %p provides locale value: AM, PM (en_US); am, pm (de_DE); So forcing it to be lower in any case am_pm = date_time.strftime("%p").lower() - date_time_str = date_time.strftime( - f"%B %-d, %Y at {hour}:%M {am_pm} Pacific time" - ) + date_time_str = date_time.strftime(f"%B %-d, %Y at {hour}:%M {am_pm} Pacific time") return date_time_str @staticmethod @@ -119,9 +109,7 @@ def format_as_report_string_with_custom_time( hour = date_time.strftime("%I").lstrip("0") # %p provides locale value: AM, PM (en_US); am, pm (de_DE); So forcing it to be lower in any case am_pm = date_time.strftime("%p").lower() - date_time_str = date_time.strftime( - f"%B %-d, %Y at {hour}:%M {am_pm} Pacific time" - ) + date_time_str = date_time.strftime(f"%B %-d, %Y at {hour}:%M {am_pm} Pacific time") return date_time_str @staticmethod @@ -132,9 +120,7 @@ def format_as_report_expiry_string(date_time: datetime) -> str: midnight for expiry times. 
""" # ensure is set to correct timezone - date_time_str = LegislationDatetime.format_as_report_string_with_custom_time( - date_time, 0, 1, 0, 0 - ) + date_time_str = LegislationDatetime.format_as_report_string_with_custom_time(date_time, 0, 1, 0, 0) return date_time_str @staticmethod diff --git a/queue_services/entity-filer/src/entity_filer/utils/utils.py b/queue_services/entity-filer/src/entity_filer/utils/utils.py index 3b66557812..34a19ee10e 100644 --- a/queue_services/entity-filer/src/entity_filer/utils/utils.py +++ b/queue_services/entity-filer/src/entity_filer/utils/utils.py @@ -37,6 +37,7 @@ """ import io import os +from importlib.metadata import version # import PyPDF2 @@ -46,9 +47,6 @@ # from legal_api.services.pdf_service import RegistrarStampData # from legal_api.utils.legislation_datetime import LegislationDatetime -import os -from importlib.metadata import version - def _get_commit_hash(): """Return the containers ref if present.""" @@ -66,16 +64,15 @@ def get_run_version(): def replace_file_with_certified_copy( - _bytes: bytes, - key: str, -# data: RegistrarStampData + _bytes: bytes, + key: str, + # data: RegistrarStampData ): """Create a certified copy and replace it into Minio server.""" raise Exception # TODO we shouldn't do this anymore - # open_pdf_file = io.BytesIO(_bytes) # pdf_reader = PyPDF2.PdfFileReader(open_pdf_file) # pdf_writer = PyPDF2.PdfFileWriter() diff --git a/queue_services/entity-filer/tests/__init__.py b/queue_services/entity-filer/tests/__init__.py index ffba84616e..0f9d97b75f 100644 --- a/queue_services/entity-filer/tests/__init__.py +++ b/queue_services/entity-filer/tests/__init__.py @@ -37,7 +37,6 @@ from collections.abc import MutableMapping, MutableSequence from typing import Dict, List - EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) @@ -69,9 +68,7 @@ def del_key_in_dict(orig_dict, keys): if rv := scan_list(value, keys): modified_dict[key] = rv else: - modified_dict[ - key - ] = value # or copy.deepcopy(value) if a copy is desired for non-dicts. + modified_dict[key] = value # or copy.deepcopy(value) if a copy is desired for non-dicts. 
return modified_dict def scan_list(orig_list, keys): diff --git a/queue_services/entity-filer/tests/conftest.py b/queue_services/entity-filer/tests/conftest.py index 8bb4da5bd8..9765060793 100644 --- a/queue_services/entity-filer/tests/conftest.py +++ b/queue_services/entity-filer/tests/conftest.py @@ -20,13 +20,13 @@ import time from contextlib import contextmanager, suppress +import business_model_migrations import pytest import requests import sqlalchemy -import business_model_migrations +from business_model import db as _db from flask import Flask, current_app from flask_migrate import Migrate, upgrade -from business_model import db as _db # from legal_api import db as _db # from legal_api import jwt as _jwt @@ -207,7 +207,7 @@ def db(app): # pylint: disable=redefined-outer-name, invalid-name dir_path = os.path.dirname(business_model_migrations.__file__) - migrate = Migrate(app, _db, directory=dir_path, **{"dialect_name": "postgres"}) + migrate = Migrate(app, _db, directory=dir_path, **{"dialect_name": "postgres"}) # noqa F841 upgrade() yield _db @@ -249,9 +249,7 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name @event.listens_for(sess(), "after_transaction_end") def restart_savepoint(sess2, trans): # pylint: disable=unused-variable # Detecting whether this is indeed the nested transaction of the test - if ( - trans.nested and not trans._parent.nested - ): # pylint: disable=protected-access + if trans.nested and not trans._parent.nested: # pylint: disable=protected-access # Handle where test DOESN'T session.commit(), sess2.expire_all() sess.begin_nested() @@ -391,9 +389,7 @@ def create_test_db( DATABASE_URI = DATABASE_URI[: DATABASE_URI.rfind("/")] + "/postgres" try: - with sqlalchemy.create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with sqlalchemy.create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(f"CREATE DATABASE {database}")) return True @@ -425,8 +421,6 @@ def drop_test_db( AND pid <> pg_backend_pid(); """ with suppress(sqlalchemy.exc.ProgrammingError, Exception): - with sqlalchemy.create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with sqlalchemy.create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(close_all)) conn.execute(text(f"DROP DATABASE {database}")) diff --git a/queue_services/entity-filer/tests/unit/__init__.py b/queue_services/entity-filer/tests/unit/__init__.py index 35242a1acc..7be1400587 100644 --- a/queue_services/entity-filer/tests/unit/__init__.py +++ b/queue_services/entity-filer/tests/unit/__init__.py @@ -17,11 +17,12 @@ from contextlib import contextmanager import sqlalchemy +from business_model import Filing, db +from business_model.models.colin_event_id import ColinEventId from freezegun import freeze_time + from entity_filer.utils.datetime import datetime, timezone from tests import EPOCH_DATETIME, FROZEN_DATETIME -from business_model import db, Filing -from business_model.models.colin_event_id import ColinEventId AR_FILING = { "filing": { @@ -384,12 +385,8 @@ def create_filing( if json_filing: # filing.filing_json = json_filing filing._filing_json = json_filing - filing._filing_type = ( - json_filing.get("filing", {}).get("header", {}).get("name") - ) - filing._filing_sub_type = filing.get_filings_sub_type( - filing._filing_type, json_filing - ) + filing._filing_type = json_filing.get("filing", {}).get("header", {}).get("name") + filing._filing_sub_type = 
filing.get_filings_sub_type(filing._filing_type, json_filing) if business_id: filing.legal_entity_id = business_id if bootstrap_id: @@ -530,9 +527,7 @@ def create_entity_person(party_json): country="CA", postal_code=party_json["mailingAddress"]["postalCode"], region=party_json["mailingAddress"]["addressRegion"], - delivery_instructions=party_json["mailingAddress"] - .get("deliveryInstructions", "") - .upper(), + delivery_instructions=party_json["mailingAddress"].get("deliveryInstructions", "").upper(), ) new_party.entity_mailing_address = mailing_address if party_json.get("deliveryAddress"): @@ -542,9 +537,7 @@ def create_entity_person(party_json): country="CA", postal_code=party_json["deliveryAddress"]["postalCode"], region=party_json["deliveryAddress"]["addressRegion"], - delivery_instructions=party_json["deliveryAddress"] - .get("deliveryInstructions", "") - .upper(), + delivery_instructions=party_json["deliveryAddress"].get("deliveryInstructions", "").upper(), ) new_party.entity_delivery_address = delivery_address new_party.save() @@ -582,9 +575,7 @@ def factory_completed_filing( ): """Create a completed filing.""" if not payment_token: - payment_token = str(base64.urlsafe_b64encode(uuid.uuid4().bytes)).replace( - "=", "" - ) + payment_token = str(base64.urlsafe_b64encode(uuid.uuid4().bytes)).replace("=", "") with freeze_time(filing_date): filing = Filing() diff --git a/queue_services/entity-filer/tests/unit/experiment/test_versioning.py b/queue_services/entity-filer/tests/unit/experiment/test_versioning.py index 1f0e99a596..437efc4144 100644 --- a/queue_services/entity-filer/tests/unit/experiment/test_versioning.py +++ b/queue_services/entity-filer/tests/unit/experiment/test_versioning.py @@ -1,7 +1,6 @@ import datetime import pytest - from business_model import LegalEntity SKIP_NON_MANUAL_RUN = True diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_offices.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_offices.py index f706d2246b..c7e03a816f 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_offices.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_offices.py @@ -18,7 +18,6 @@ from entity_filer.filing_processors.filing_components.offices import update_offices from tests import strip_keys_from_dict - OFFICE_STRUCTURE = { "offices": { "recordsOffice": { @@ -69,9 +68,7 @@ "test_name,office_structure,expected_error", [("valid office", OFFICE_STRUCTURE, None)], ) -def test_manage_office_structure__offices( - app, session, test_name, office_structure, expected_error -): +def test_manage_office_structure__offices(app, session, test_name, office_structure, expected_error): """Assert that the corp offices gets set.""" business = LegalEntity() business.save() @@ -91,9 +88,7 @@ def test_manage_office_structure__delete_and_recreate_offices( update_and_validate_office(business, office_structure) # Change the value of address to recreate - office_structure["offices"]["recordsOffice"]["mailingAddress"][ - "postalCode" - ] = "L6M 5M7" + office_structure["offices"]["recordsOffice"]["mailingAddress"]["postalCode"] = "L6M 5M7" update_and_validate_office(business, office_structure) @@ -108,9 +103,7 @@ def update_and_validate_office(business, office_structure): for s in check_offices: check_office_structure["offices"][s.office_type] = {} for address in s.addresses: - check_office_structure["offices"][s.office_type][ - 
f"{address.address_type}Address" - ] = address.json + check_office_structure["offices"][s.office_type][f"{address.address_type}Address"] = address.json stripped_dict = strip_keys_from_dict(check_office_structure, ["id", "addressType"]) assert stripped_dict == office_structure assert not err diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_parties_entity_roles.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_parties_entity_roles.py index a5510e0e3c..98e2c81691 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_parties_entity_roles.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_parties_entity_roles.py @@ -37,37 +37,29 @@ Director X CUD Incorporator X X CU CU Liquidator X CUD - Partner X X CUD + Partner X X CUD Proprietor X X C """ -import json import datetime +import json from contextlib import suppress from copy import deepcopy from random import randint import pytest -from business_model import Address -from business_model import EntityRole -from business_model import Filing -from business_model import LegalEntity -from sql_versioning import history_cls -from sql_versioning import versioned_session - -from tests.unit import nested_session +from business_model import Address, EntityRole, Filing, LegalEntity +from sql_versioning import history_cls, versioned_session -from entity_filer.exceptions import BusinessException -from entity_filer.exceptions import ErrorCode -from entity_filer.exceptions import get_error_message +from entity_filer.exceptions import BusinessException, ErrorCode, get_error_message from entity_filer.filing_processors.filing_components.parties import ( - merge_entity_role_to_filing, create_entity_with_addresses, get_address_for_filing, map_schema_role_to_enum, merge_all_parties, + merge_entity_role_to_filing, ) - +from tests.unit import nested_session BASE_TEMPLATE = { "roles": [], @@ -318,9 +310,7 @@ def test_person_and_role_exists(session, test_name, schema_role, template): @pytest.mark.parametrize("test_name,schema_role,template", TEST_PARTY_ROLES) -def test_person_and_role_exists_cessation_date_set( - session, test_name, schema_role, template -): +def test_person_and_role_exists_cessation_date_set(session, test_name, schema_role, template): """Test where the person and role exists and the role is ceased. Assumption: Entity exists. 
@@ -376,23 +366,13 @@ def test_person_and_role_exists_cessation_date_set( entity_roles = EntityRole.get_entity_roles_by_filing(filing_id=filing.id) assert len(entity_roles) == 0 - historical_roles = EntityRole.get_entity_roles_history_by_filing( - filing_id=filing.id - ) + historical_roles = EntityRole.get_entity_roles_history_by_filing(filing_id=filing.id) number_of_historical_roles = len(historical_roles) assert number_of_historical_roles >= 2 - assert historical_roles[ - number_of_historical_roles - 1 - ].role_type == map_schema_role_to_enum(schema_role) + assert historical_roles[number_of_historical_roles - 1].role_type == map_schema_role_to_enum(schema_role) + assert historical_roles[number_of_historical_roles - 1].related_entity_id == person.id assert ( - historical_roles[number_of_historical_roles - 1].related_entity_id - == person.id - ) - assert ( - historical_roles[number_of_historical_roles - 1].cessation_date.replace( - tzinfo=None - ) - == cessation_date + historical_roles[number_of_historical_roles - 1].cessation_date.replace(tzinfo=None) == cessation_date ) @@ -445,9 +425,7 @@ def test_directors_exist_but_not_in_filing(session): assert len(current_entity_roles) == 1 - historical_roles = EntityRole.get_entity_roles_history_by_filing( - filing_id=filing.id - ) + historical_roles = EntityRole.get_entity_roles_history_by_filing(filing_id=filing.id) number_of_historical_roles = len(historical_roles) # Should be at least 2 records for each historical role. assert number_of_historical_roles == 4 diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py index b1ad916596..74aa5e97b4 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py @@ -37,9 +37,7 @@ ), ], ) -def test_manage_share_structure__resolution_dates( - app, session, test_name, resolution_dates, expected_error -): +def test_manage_share_structure__resolution_dates(app, session, test_name, resolution_dates, expected_error): """Assert that the corp share resolution date gets set.""" new_data = {"shareStructure": {"resolutionDates": resolution_dates}} @@ -55,9 +53,7 @@ def test_manage_share_structure__resolution_dates( assert err == expected_error else: assert len(check_resolution) == len(resolution_dates) - assert set(resolution_dates) == set( - [x.resolution_date.isoformat() for x in check_resolution] - ) + assert set(resolution_dates) == set([x.resolution_date.isoformat() for x in check_resolution]) SINGLE_SHARE_CLASS = { @@ -91,9 +87,7 @@ def test_manage_share_structure__resolution_dates( "test_name,share_structure,expected_error", [("valid single_share_class", SINGLE_SHARE_CLASS, None)], ) -def test_manage_share_structure__share_classes( - app, session, test_name, share_structure, expected_error -): +def test_manage_share_structure__share_classes(app, session, test_name, share_structure, expected_error): """Assert that the corp share classes gets set.""" business = LegalEntity() business.save() diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/utils.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/utils.py index f0d3b90aea..37e1b9b4b8 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/utils.py +++ 
b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/utils.py @@ -1,6 +1,6 @@ -from attrs import has, fields +from attrs import fields, has from cattrs import Converter -from cattrs.gen import make_dict_unstructure_fn, make_dict_structure_fn, override +from cattrs.gen import make_dict_structure_fn, make_dict_unstructure_fn, override converter = Converter() @@ -12,17 +12,13 @@ def to_camel_case(snake_str: str) -> str: def to_camel_case_unstructure(cls): return make_dict_unstructure_fn( - cls, - converter, - **{a.name: override(rename=to_camel_case(a.name)) for a in fields(cls)} + cls, converter, **{a.name: override(rename=to_camel_case(a.name)) for a in fields(cls)} ) def to_camel_case_structure(cls): return make_dict_structure_fn( - cls, - converter, - **{a.name: override(rename=to_camel_case(a.name)) for a in fields(cls)} + cls, converter, **{a.name: override(rename=to_camel_case(a.name)) for a in fields(cls)} ) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_admin_freeze.py b/queue_services/entity-filer/tests/unit/filing_processors/test_admin_freeze.py index 8bb912d4c9..3062229129 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_admin_freeze.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_admin_freeze.py @@ -35,13 +35,12 @@ import copy import random -from business_model import LegalEntity, Filing +from business_model import Filing, LegalEntity from registry_schemas.example_data import ADMIN_FREEZE, FILING_HEADER from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import admin_freeze -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -68,7 +67,7 @@ def test_worker_admin_freeze(app, session, mocker): # Check outcome final_filing = Filing.find_by_id(filing_id) - assert business.admin_freeze == True + assert business.admin_freeze is True assert business.state_filing_id is None assert business.dissolution_date is None assert filing_json["filing"]["adminFreeze"]["details"] == final_filing.order_details diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_agm_location_change.py b/queue_services/entity-filer/tests/unit/filing_processors/test_agm_location_change.py index 0e197c30f2..42000fdd5f 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_agm_location_change.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_agm_location_change.py @@ -18,8 +18,7 @@ from business_model import Filing from registry_schemas.example_data import AGM_LOCATION_CHANGE, FILING_HEADER -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -47,7 +46,7 @@ def test_worker_agm_location_change(app, session, mocker): final_filing = Filing.find_by_id(filing.id) assert final_filing.id assert final_filing.meta_data - + agm_location_change = final_filing.meta_data.get("agmLocationChange") assert filing_json["filing"]["agmLocationChange"]["year"] == agm_location_change.get("year") assert filing_json["filing"]["agmLocationChange"]["agmLocation"] == agm_location_change.get("agmLocation") diff --git 
a/queue_services/entity-filer/tests/unit/filing_processors/test_aliases.py b/queue_services/entity-filer/tests/unit/filing_processors/test_aliases.py index eb0a2846c8..e5bfa68298 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_aliases.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_aliases.py @@ -47,12 +47,8 @@ def test_modified_aliases(app, session): old_value_2 = "B1 LTD." new_value_2 = "B2 LTD." business = create_business(identifier) - business.aliases.append( - Alias(alias=old_value_1, type=Alias.AliasType.TRANSLATION.value) - ) - business.aliases.append( - Alias(alias=old_value_2, type=Alias.AliasType.TRANSLATION.value) - ) + business.aliases.append(Alias(alias=old_value_1, type=Alias.AliasType.TRANSLATION.value)) + business.aliases.append(Alias(alias=old_value_2, type=Alias.AliasType.TRANSLATION.value)) business.save() business_aliases = business.aliases.all() assert len(business_aliases) == 2 @@ -70,9 +66,7 @@ def test_modified_aliases(app, session): business_aliases = business.aliases.all() assert len(business_aliases) == 2 for alias in component["nameTranslations"]: - business_alias = next( - (x for x in business_aliases if str(x.id) == alias["id"]), None - ) + business_alias = next((x for x in business_aliases if str(x.id) == alias["id"]), None) assert business_alias.alias == alias["name"].upper() @@ -84,12 +78,8 @@ def test_cease_aliases(app, session): alias_2 = "A2 LTD." alias_3 = "A3 LTD." business = create_business(identifier) - business.aliases.append( - Alias(alias=alias_1, type=Alias.AliasType.TRANSLATION.value) - ) - business.aliases.append( - Alias(alias=alias_2, type=Alias.AliasType.TRANSLATION.value) - ) + business.aliases.append(Alias(alias=alias_1, type=Alias.AliasType.TRANSLATION.value)) + business.aliases.append(Alias(alias=alias_2, type=Alias.AliasType.TRANSLATION.value)) business.save() assert len(business.aliases.all()) == 2 @@ -113,12 +103,8 @@ def test_all_aliases(app, session): alias_3 = "A3 LTD." alias_4 = "A4 LTD." 
business = create_business(identifier) - business.aliases.append( - Alias(alias=alias_1, type=Alias.AliasType.TRANSLATION.value) - ) - business.aliases.append( - Alias(alias=alias_2, type=Alias.AliasType.TRANSLATION.value) - ) + business.aliases.append(Alias(alias=alias_1, type=Alias.AliasType.TRANSLATION.value)) + business.aliases.append(Alias(alias=alias_2, type=Alias.AliasType.TRANSLATION.value)) business.save() business_aliases = business.aliases.all() assert len(business_aliases) == 2 @@ -137,21 +123,13 @@ def test_all_aliases(app, session): assert 2 == len(business_aliases) assert ( next( - ( - x - for x in business_aliases - if str(x.id) == component["nameTranslations"][0]["id"] - ), + (x for x in business_aliases if str(x.id) == component["nameTranslations"][0]["id"]), None, ).alias == alias_3.upper() ) assert next( - ( - x - for x in business_aliases - if x.alias == component["nameTranslations"][1]["name"].upper() - ), + (x for x in business_aliases if x.alias == component["nameTranslations"][1]["name"].upper()), None, ) assert not next((x for x in business_aliases if x.alias == alias_1.upper()), None) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_alteration.py b/queue_services/entity-filer/tests/unit/filing_processors/test_alteration.py index aeb35d7241..46e2c73019 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_alteration.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_alteration.py @@ -18,23 +18,15 @@ from typing import Final import pytest -from business_model import LegalEntity, Filing, Document +from business_model import Document, Filing, LegalEntity from business_model.models.document import DocumentType -from registry_schemas.example_data import ( - ALTERATION, - ALTERATION_FILING_TEMPLATE, - BUSINESS, - COURT_ORDER, - FILING_HEADER, -) +from registry_schemas.example_data import ALTERATION, ALTERATION_FILING_TEMPLATE, BUSINESS, COURT_ORDER, FILING_HEADER from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import alteration -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing - CONTACT_POINT = {"email": "no_one@never.get", "phone": "123-456-7890"} @@ -57,9 +49,7 @@ def test_alteration_process(app, session, orig_legal_type, new_legal_type): alteration_filing["filing"]["alteration"] = copy.deepcopy(ALTERATION) alteration_filing["filing"]["alteration"]["business"]["legalType"] = new_legal_type payment_id = str(random.SystemRandom().getrandbits(0x58)) - filing_submission = create_filing( - payment_id, alteration_filing, business_id=business.id - ) + filing_submission = create_filing(payment_id, alteration_filing, business_id=business.id) filing_meta = FilingMeta() @@ -125,9 +115,7 @@ def test_worker_alteration(app, session, mocker, orig_legal_type, new_legal_type ("no_change", "1234567 B.C. 
LTD.", None), # No change in name ], ) -def test_alteration_legal_name( - app, session, mocker, test_name, legal_name, new_legal_name -): +def test_alteration_legal_name(app, session, mocker, test_name, legal_name, new_legal_name): """Assert the worker process calls the alteration correctly.""" identifier = "BC1234567" business = create_business(identifier) @@ -229,17 +217,11 @@ def test_alteration_coop_association_type(app, session, new_association_type): business.entity_type = LegalEntity.EntityTypes.COOP.value alteration_filing = copy.deepcopy(FILING_HEADER) - alteration_filing["filing"]["business"][ - "legalType" - ] = LegalEntity.EntityTypes.COOP.value + alteration_filing["filing"]["business"]["legalType"] = LegalEntity.EntityTypes.COOP.value alteration_filing["filing"]["alteration"] = copy.deepcopy(ALTERATION) - alteration_filing["filing"]["alteration"][ - "cooperativeAssociationType" - ] = new_association_type + alteration_filing["filing"]["alteration"]["cooperativeAssociationType"] = new_association_type payment_id = str(random.SystemRandom().getrandbits(0x58)) - filing_submission = create_filing( - payment_id, alteration_filing, business_id=business.id - ) + filing_submission = create_filing(payment_id, alteration_filing, business_id=business.id) filing_meta = FilingMeta() @@ -263,9 +245,7 @@ def test_alteration_coop_rules_and_memorandum(app, session): business.entity_type = LegalEntity.EntityTypes.COOP.value alteration_filing = copy.deepcopy(FILING_HEADER) - alteration_filing["filing"]["business"][ - "legalType" - ] = LegalEntity.EntityTypes.COOP.value + alteration_filing["filing"]["business"]["legalType"] = LegalEntity.EntityTypes.COOP.value alteration_filing["filing"]["alteration"] = copy.deepcopy(ALTERATION) # TODO diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py b/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py index 7e23553378..c308c7629d 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py @@ -17,18 +17,14 @@ import random from unittest.mock import patch +from business_model import Filing, LegalEntity from freezegun import freeze_time -from business_model import LegalEntity, Filing from registry_schemas.example_data import ANNUAL_REPORT # from entity_filer.filing_processors.filing_components import create_party, create_role from entity_filer.filing_meta import FilingMeta -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage -from tests.unit import ( - create_business, - create_filing, -) +from entity_filer.resources.worker import FilingMessage, process_filing +from tests.unit import create_business, create_filing def test_process_ar_filing(app, session): @@ -41,7 +37,6 @@ def test_process_ar_filing(app, session): # setup business = create_business(identifier, "CP") - business_id = business.id now = datetime.date(2020, 9, 17) ar_date = datetime.date(2020, 8, 5) agm_date = datetime.date(2020, 7, 1) @@ -55,11 +50,7 @@ def test_process_ar_filing(app, session): # TEST with freeze_time(now): filing = create_filing(payment_id, ar, business.id) - filing_id = filing.id - filing_msg = FilingMessage(filing_identifier=filing_id) - annual_report.process( - business, filing.filing_json["filing"], filing_meta=filing_meta - ) + annual_report.process(business, filing.filing_json["filing"], filing_meta=filing_meta) # check it out # NOTE: 
until we save or convert the dates, they are FakeDate objects, so casting to str() @@ -74,9 +65,7 @@ def test_process_ar_filing_no_agm(app, session): identifier = "CP1234567" # setup - business = create_business( - identifier, legal_type=LegalEntity.EntityTypes.COOP.value - ) + business = create_business(identifier, legal_type=LegalEntity.EntityTypes.COOP.value) business_id = business.id now = datetime.date(2020, 9, 17) ar_date = datetime.date(2020, 8, 5) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py b/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py index 8c03df26eb..ed55591518 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py @@ -16,13 +16,12 @@ import random from datetime import datetime -from business_model import LegalEntity, Filing +from business_model import Filing, LegalEntity from business_model.utils.legislation_datetime import LegislationDatetime from registry_schemas.example_data import CONTINUATION_OUT, FILING_TEMPLATE from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import continuation_out - from tests.unit import create_business, create_filing @@ -37,68 +36,37 @@ def test_worker_continuation_out(app, session): filing_json["filing"]["continuationOut"] = CONTINUATION_OUT payment_id = str(random.SystemRandom().getrandbits(0x58)) - continuation_out_filing = create_filing( - payment_id, filing_json, business_id=business.id - ) + continuation_out_filing = create_filing(payment_id, filing_json, business_id=business.id) filing_meta = FilingMeta() # Test - continuation_out.process( - business, continuation_out_filing, filing_json["filing"], filing_meta - ) + continuation_out.process(business, continuation_out_filing, filing_json["filing"], filing_meta) business.save() # Check outcome final_filing = Filing.find_by_id(continuation_out_filing.id) - foreign_jurisdiction_json = filing_json["filing"]["continuationOut"][ - "foreignJurisdiction" - ] - continuation_out_date_str = filing_json["filing"]["continuationOut"][ - "continuationOutDate" - ] - continuation_out_date = ( - LegislationDatetime.as_utc_timezone_from_legislation_date_str( - continuation_out_date_str - ) - ) + foreign_jurisdiction_json = filing_json["filing"]["continuationOut"]["foreignJurisdiction"] + continuation_out_date_str = filing_json["filing"]["continuationOut"]["continuationOutDate"] + continuation_out_date = LegislationDatetime.as_utc_timezone_from_legislation_date_str(continuation_out_date_str) - assert ( - filing_json["filing"]["continuationOut"]["courtOrder"]["fileNumber"] - == final_filing.court_order_file_number - ) + assert filing_json["filing"]["continuationOut"]["courtOrder"]["fileNumber"] == final_filing.court_order_file_number assert ( filing_json["filing"]["continuationOut"]["courtOrder"]["effectOfOrder"] == final_filing.court_order_effect_of_order ) - assert ( - filing_json["filing"]["continuationOut"]["details"] - == final_filing.comments[0].comment - ) + assert filing_json["filing"]["continuationOut"]["details"] == final_filing.comments[0].comment assert final_filing.submitter_id == final_filing.comments[0].staff_id assert business.state == LegalEntity.State.HISTORICAL assert business.state_filing_id == final_filing.id assert business.jurisdiction == foreign_jurisdiction_json["country"].upper() - assert ( - business.foreign_jurisdiction_region - == 
foreign_jurisdiction_json["region"].upper() - ) - assert ( - business.foreign_legal_name - == filing_json["filing"]["continuationOut"]["legalName"] - ) + assert business.foreign_jurisdiction_region == foreign_jurisdiction_json["region"].upper() + assert business.foreign_legal_name == filing_json["filing"]["continuationOut"]["legalName"] assert business.continuation_out_date == continuation_out_date assert business.dissolution_date == continuation_out_date - assert ( - filing_meta.continuation_out["country"] == foreign_jurisdiction_json["country"] - ) + assert filing_meta.continuation_out["country"] == foreign_jurisdiction_json["country"] assert filing_meta.continuation_out["region"] == foreign_jurisdiction_json["region"] - assert ( - filing_meta.continuation_out["continuationOutDate"] == continuation_out_date_str - ) - assert ( - filing_meta.continuation_out["legalName"] - == filing_json["filing"]["continuationOut"]["legalName"] - ) + assert filing_meta.continuation_out["continuationOutDate"] == continuation_out_date_str + assert filing_meta.continuation_out["legalName"] == filing_json["filing"]["continuationOut"]["legalName"] diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_conversion.py b/queue_services/entity-filer/tests/unit/filing_processors/test_conversion.py index d009370722..dd43e1572b 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_conversion.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_conversion.py @@ -25,9 +25,7 @@ from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import conversion - -from tests.unit import create_filing -from tests.unit import nested_session +from tests.unit import create_filing, nested_session def test_conversion_process_with_nr(app, session): @@ -51,18 +49,10 @@ def test_conversion_process_with_nr(app, session): # Assertions assert business.identifier == identifier assert business.founding_date == effective_date - assert ( - business.entity_type - == filing["filing"]["conversion"]["nameRequest"]["legalType"] - ) - assert ( - business.legal_name - == filing["filing"]["conversion"]["nameRequest"]["legalName"] - ) + assert business.entity_type == filing["filing"]["conversion"]["nameRequest"]["legalType"] + assert business.legal_name == filing["filing"]["conversion"]["nameRequest"]["legalName"] assert len(business.share_classes.all()) == 2 - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. def test_conversion_process_no_nr(app, session): @@ -82,15 +72,10 @@ def test_conversion_process_no_nr(app, session): # Assertions assert business.identifier == identifier assert business.founding_date == effective_date - assert ( - business.entity_type - == filing["filing"]["conversion"]["nameRequest"]["legalType"] - ) + assert business.entity_type == filing["filing"]["conversion"]["nameRequest"]["legalType"] assert business.legal_name == business.identifier[2:] + " B.C. LTD." assert len(business.share_classes.all()) == 2 - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. 
def test_conversion_coop_from_colin(app, session): @@ -125,17 +110,10 @@ def test_conversion_coop_from_colin(app, session): # Assertions assert business.identifier == identifier - assert business.founding_date.replace(tzinfo=None) == effective_date.replace( - tzinfo=None - ) - assert ( - business.entity_type - == filing["filing"]["conversion"]["nameRequest"]["legalType"] - ) + assert business.founding_date.replace(tzinfo=None) == effective_date.replace(tzinfo=None) + assert business.entity_type == filing["filing"]["conversion"]["nameRequest"]["legalType"] assert business.legal_name == "Test" - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. @pytest.mark.parametrize( @@ -175,17 +153,10 @@ def test_conversion_bc_company_from_colin(app, session, legal_type, legal_name_s # Assertions assert business.identifier == identifier - assert business.founding_date.replace(tzinfo=None) == effective_date.replace( - tzinfo=None - ) - assert ( - business.entity_type - == filing["filing"]["conversion"]["nameRequest"]["legalType"] - ) + assert business.founding_date.replace(tzinfo=None) == effective_date.replace(tzinfo=None) + assert business.entity_type == filing["filing"]["conversion"]["nameRequest"]["legalType"] assert business.legal_name == f"{business.identifier[2:]} {legal_name_suffix}" - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. assert len(business.share_classes.all()) == 2 assert len(business.entity_roles.all()) == 2 assert len(filing_rec.filing_entity_roles.all()) == 3 diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_court_order.py b/queue_services/entity-filer/tests/unit/filing_processors/test_court_order.py index e1c0524989..5b771a9d10 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_court_order.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_court_order.py @@ -18,8 +18,7 @@ from business_model import DocumentType, Filing from registry_schemas.example_data import COURT_ORDER_FILING_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -41,14 +40,8 @@ def test_worker_court_order(app, session): # Check outcome final_filing = Filing.find_by_id(filing_id) - assert ( - filing["filing"]["courtOrder"]["fileNumber"] - == final_filing.court_order_file_number - ) - assert ( - filing["filing"]["courtOrder"]["effectOfOrder"] - == final_filing.court_order_effect_of_order - ) + assert filing["filing"]["courtOrder"]["fileNumber"] == final_filing.court_order_file_number + assert filing["filing"]["courtOrder"]["effectOfOrder"] == final_filing.court_order_effect_of_order assert filing["filing"]["courtOrder"]["orderDetails"] == final_filing.order_details court_order_file = final_filing.documents.one_or_none() diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_dissolution.py b/queue_services/entity-filer/tests/unit/filing_processors/test_dissolution.py index 200f3d184d..4d9004cf97 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_dissolution.py +++ 
b/queue_services/entity-filer/tests/unit/filing_processors/test_dissolution.py @@ -16,20 +16,17 @@ from datetime import datetime import pytest - -from business_model import EntityRole, LegalEntity, Office, OfficeType, Filing -from business_model import DocumentType +from business_model import DocumentType, EntityRole, Filing, LegalEntity, Office, OfficeType from business_model.utils.legislation_datetime import LegislationDatetime - from registry_schemas.example_data import DISSOLUTION, FILING_HEADER -from entity_filer.filing_meta import FilingMeta - -# from tests.utils import upload_file, assert_pdf_contains_text, has_expected_date_str_format +from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import dissolution from tests import has_expected_date_str_format from tests.unit import create_business, create_filing +# from tests.utils import upload_file, assert_pdf_contains_text, has_expected_date_str_format + @pytest.mark.parametrize( "legal_type,identifier,dissolution_type", @@ -65,9 +62,7 @@ def test_dissolution(app, session, legal_type, identifier, dissolution_type): if legal_type == LegalEntity.EntityTypes.COOP.value: affidavit_uploaded_by_user_file_key = "fake-key" - filing_json["filing"]["dissolution"][ - "affidavitFileKey" - ] = affidavit_uploaded_by_user_file_key + filing_json["filing"]["dissolution"]["affidavitFileKey"] = affidavit_uploaded_by_user_file_key business = create_business(identifier, legal_type=legal_type) member = LegalEntity( @@ -90,7 +85,6 @@ def test_dissolution(app, session, legal_type, identifier, dissolution_type): legal_entity_id=business.id, ) party_role.save() - curr_roles = len(business.entity_roles.all()) business.dissolution_date = None business_id = business.id @@ -109,7 +103,6 @@ def test_dissolution(app, session, legal_type, identifier, dissolution_type): assert business.state == LegalEntity.State.HISTORICAL assert business.state_filing_id == filing.id assert len(business.entity_roles.all()) == 2 - entity_roles = filing.filing_entity_roles.all() assert len(filing.filing_entity_roles.all()) == 2 custodial_office = ( @@ -138,17 +131,11 @@ def test_dissolution(app, session, legal_type, identifier, dissolution_type): LegalEntity.EntityTypes.SOLE_PROP.value, LegalEntity.EntityTypes.PARTNERSHIP.value, ): - expected_dissolution_date = datetime.fromisoformat( - f"{dissolution_date}T07:00:00+00:00" - ) + expected_dissolution_date = datetime.fromisoformat(f"{dissolution_date}T07:00:00+00:00") - expected_dissolution_date_str = LegislationDatetime.format_as_legislation_date( - expected_dissolution_date - ) + expected_dissolution_date_str = LegislationDatetime.format_as_legislation_date(expected_dissolution_date) assert business.dissolution_date == expected_dissolution_date - dissolution_date_format_correct = has_expected_date_str_format( - expected_dissolution_date_str, "%Y-%m-%d" - ) + dissolution_date_format_correct = has_expected_date_str_format(expected_dissolution_date_str, "%Y-%m-%d") assert dissolution_date_format_correct assert filing_meta.dissolution["dissolutionDate"] == expected_dissolution_date_str @@ -160,9 +147,7 @@ def test_dissolution(app, session, legal_type, identifier, dissolution_type): ("GP", "FM1234567", "administrative"), ], ) -def test_administrative_dissolution( - app, session, legal_type, identifier, dissolution_type -): +def test_administrative_dissolution(app, session, legal_type, identifier, dissolution_type): """Assert that the dissolution is processed.""" # setup filing_json = copy.deepcopy(FILING_HEADER) 
@@ -200,7 +185,6 @@ def test_administrative_dissolution( legal_entity_id=business.id, ) party_role.save() - curr_roles = len(business.entity_roles.all()) business.dissolution_date = None business_id = business.id @@ -244,12 +228,8 @@ def test_administrative_dissolution( assert filing_meta.dissolution["dissolutionType"] == dissolution_type - dissolution_date_str = LegislationDatetime.format_as_legislation_date( - filing.effective_date - ) - dissolution_date_format_correct = has_expected_date_str_format( - dissolution_date_str, "%Y-%m-%d" - ) + dissolution_date_str = LegislationDatetime.format_as_legislation_date(filing.effective_date) + dissolution_date_format_correct = has_expected_date_str_format(dissolution_date_str, "%Y-%m-%d") assert dissolution_date_format_correct assert filing_meta.dissolution["dissolutionDate"] == dissolution_date_str diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_incorporation_filing.py b/queue_services/entity-filer/tests/unit/filing_processors/test_incorporation_filing.py index 98206a7c59..6a9feb7003 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_incorporation_filing.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_incorporation_filing.py @@ -19,37 +19,22 @@ from unittest.mock import patch import pytest -from business_model import LegalEntity, Filing +from business_model import DocumentType, Filing, LegalEntity from business_model.models.colin_event_id import ColinEventId -from business_model import DocumentType from registry_schemas.example_data import INCORPORATION_FILING_TEMPLATE from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import incorporation_filing from entity_filer.filing_processors.filing_components import legal_entity_info -from tests.unit import create_filing -from tests.unit import nested_session - +from tests.unit import create_filing, nested_session COOP_INCORPORATION_FILING_TEMPLATE = copy.deepcopy(INCORPORATION_FILING_TEMPLATE) -del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["offices"][ - "recordsOffice" -] -del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["parties"][ - 1 -] -del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"][ - "shareStructure" -] -del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"][ - "incorporationAgreement" -] -COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["nameRequest"][ - "legalType" -] = "CP" -COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"][ - "cooperative" -] = { +del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["offices"]["recordsOffice"] +del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["parties"][1] +del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["shareStructure"] +del COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["incorporationAgreement"] +COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["nameRequest"]["legalType"] = "CP" +COOP_INCORPORATION_FILING_TEMPLATE["filing"]["incorporationApplication"]["cooperative"] = { "cooperativeAssociationType": "CP", "rulesFileKey": "cooperative/fa00c6bf-eaad-4a07-a3d2-4786ecd6b83b.jpg", "memorandumFileKey": "cooperative/f722bf16-86be-430d-928d-5529853a3a2c.pdf", @@ -73,25 +58,17 @@ ("CC", copy.deepcopy(INCORPORATION_FILING_TEMPLATE), "BC0001095"), ], ) -def 
test_incorporation_filing_process_with_nr( - app, session, legal_type, filing, next_corp_num -): +def test_incorporation_filing_process_with_nr(app, session, legal_type, filing, next_corp_num): """Assert that the incorporation object is correctly populated to model objects.""" # setup with nested_session(session): with patch.object( legal_entity_info, "get_next_corp_num", return_value=next_corp_num - ) as mock_get_next_corp_num: + ) as mock_get_next_corp_num: # noqa F841 identifier = "NR 1234567" - filing["filing"]["incorporationApplication"]["nameRequest"][ - "nrNumber" - ] = identifier - filing["filing"]["incorporationApplication"]["nameRequest"][ - "legalType" - ] = legal_type - filing["filing"]["incorporationApplication"]["nameRequest"][ - "legalName" - ] = "Test" + filing["filing"]["incorporationApplication"]["nameRequest"]["nrNumber"] = identifier + filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] = legal_type + filing["filing"]["incorporationApplication"]["nameRequest"]["legalName"] = "Test" if legal_type not in ("CC", "CP"): del filing["filing"]["incorporationApplication"]["courtOrder"] # if legal_type == 'CP': @@ -108,34 +85,20 @@ def test_incorporation_filing_process_with_nr( filing_meta = FilingMeta(application_date=effective_date) # test - business, filing_rec, filing_meta = incorporation_filing.process( - None, filing, filing_rec, filing_meta - ) + business, filing_rec, filing_meta = incorporation_filing.process(None, filing, filing_rec, filing_meta) # Assertions assert business.identifier == next_corp_num assert business.founding_date.replace(tzinfo=None) == effective_date - assert ( - business.entity_type - == filing["filing"]["incorporationApplication"]["nameRequest"][ - "legalType" - ] - ) - assert ( - business.legal_name - == filing["filing"]["incorporationApplication"]["nameRequest"][ - "legalName" - ] - ) + assert business.entity_type == filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] + assert business.legal_name == filing["filing"]["incorporationApplication"]["nameRequest"]["legalName"] assert business.state == LegalEntity.State.ACTIVE entity_roles = business.entity_roles.all() if legal_type in ("BC", "BEN", "ULC", "CC"): assert len(entity_roles) == 2 assert len(filing_rec.filing_entity_roles.all()) == 3 assert len(business.share_classes.all()) == 2 - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. 
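These hunks fold the `patch.object(legal_entity_info, "get_next_corp_num", ...)` context managers onto single lines and keep the bound mock only where a later assertion needs it. A small self-contained sketch of the same pattern, using a hypothetical `corp_num_source` object rather than the real `legal_entity_info` module:

from types import SimpleNamespace
from unittest.mock import patch

# Hypothetical stand-in for a module that would otherwise call an external registry.
corp_num_source = SimpleNamespace(get_next_corp_num=lambda legal_type: None)

def incorporate(legal_type: str) -> str:
    """Toy processor that asks the source for the next corporation number."""
    return corp_num_source.get_next_corp_num(legal_type)

def test_incorporate_uses_patched_corp_num():
    with patch.object(corp_num_source, "get_next_corp_num", return_value="BC0001095") as mock_next:
        assert incorporate("BEN") == "BC0001095"
        # Keep the "as" binding only when the test inspects the call itself;
        # an unused binding is the sort of thing the noqa F841 comment in the
        # hunk above is suppressing.
        mock_next.assert_called_once_with("BEN")
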
if legal_type == "CC": assert len(entity_roles) == 2 assert filing_rec.court_order_file_number == "12356" @@ -148,7 +111,7 @@ def test_incorporation_filing_process_with_nr( # assert len(documents) == 2 # for document in documents: # if document.type == DocumentType.COOP_RULES.value: - # original_rules_key = filing['filing']['incorporationApplication']['cooperative']['rulesFileKey'] + # original_rules_key = filing['filing']['incorporationApplication']['cooperative']['rulesFileKey'] # noqa E501; line too long # assert document.file_key == original_rules_key # assert MinioService.get_file(document.file_key) # elif document.type == DocumentType.COOP_MEMORANDUM.value: @@ -183,18 +146,12 @@ def test_incorporation_filing_process_with_nr( ), ], ) -def test_incorporation_filing_process_no_nr( - app, session, legal_type, filing, legal_name_suffix -): +def test_incorporation_filing_process_no_nr(app, session, legal_type, filing, legal_name_suffix): """Assert that the incorporation object is correctly populated to model objects.""" # setup next_corp_num = "BC0001095" - with patch.object( - legal_entity_info, "get_next_corp_num", return_value=next_corp_num - ) as mock_get_next_corp_num: - filing["filing"]["incorporationApplication"]["nameRequest"][ - "legalType" - ] = legal_type + with patch.object(legal_entity_info, "get_next_corp_num", return_value=next_corp_num) as mock_get_next_corp_num: + filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] = legal_type create_filing("123", filing) effective_date = datetime.utcnow() @@ -202,9 +159,7 @@ def test_incorporation_filing_process_no_nr( filing_meta = FilingMeta(application_date=filing_rec.effective_date) # test - business, filing_rec, filing_meta = incorporation_filing.process( - None, filing, filing_rec, filing_meta - ) + business, filing_rec, filing_meta = incorporation_filing.process(None, filing, filing_rec, filing_meta) # Assertions assert business.identifier == next_corp_num @@ -212,18 +167,14 @@ def test_incorporation_filing_process_no_nr( assert business.entity_type == legal_type assert business.legal_name == f"{business.identifier[2:]} {legal_name_suffix}" assert len(business.share_classes.all()) == 2 - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. assert len(business.entity_roles.all()) == 2 assert len(filing_rec.filing_entity_roles.all()) == 3 assert filing_rec.court_order_file_number == "12356" assert filing_rec.court_order_effect_of_order == "planOfArrangement" # Parties - parties = filing_rec.filing_json["filing"]["incorporationApplication"][ - "parties" - ] + parties = filing_rec.filing_json["filing"]["incorporationApplication"]["parties"] assert parties[0]["officer"]["firstName"] == "Joe" assert parties[0]["officer"]["lastName"] == "Swanson" assert parties[0]["officer"]["middleName"] == "P" @@ -231,9 +182,7 @@ def test_incorporation_filing_process_no_nr( assert parties[1]["officer"]["partyType"] == "organization" assert parties[1]["officer"]["organizationName"] == "Xyz Inc." 
- mock_get_next_corp_num.assert_called_with( - filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] - ) + mock_get_next_corp_num.assert_called_with(filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"]) @pytest.mark.parametrize( @@ -248,13 +197,11 @@ def test_get_next_corp_num(requests_mock, mocker, app, test_name, response, expe """Assert that the corpnum is the correct format.""" from flask import current_app - mocker.patch("legal_api.services.bootstrap.AccountService.get_bearer_token", return_value='') + mocker.patch("legal_api.services.bootstrap.AccountService.get_bearer_token", return_value="") with app.app_context(): current_app.config["COLIN_API"] = "http://localhost" - requests_mock.post( - f'{current_app.config["COLIN_API"]}/BC', json={"corpNum": response} - ) + requests_mock.post(f'{current_app.config["COLIN_API"]}/BC', json={"corpNum": response}) corp_num = legal_entity_info.get_next_corp_num("BEN") @@ -289,21 +236,14 @@ def test_incorporation_filing_coop_from_colin(app, session): filing_meta = FilingMeta(application_date=filing_rec.effective_date) # test - business, filing_rec, filing_meta = incorporation_filing.process( - None, filing, filing_rec, filing_meta - ) + business, filing_rec, filing_meta = incorporation_filing.process(None, filing, filing_rec, filing_meta) # Assertions assert business.identifier == corp_num assert business.founding_date.replace(tzinfo=None) == effective_date - assert ( - business.entity_type - == filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] - ) + assert business.entity_type == filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] assert business.legal_name == "Test" - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. @pytest.mark.parametrize( @@ -314,9 +254,7 @@ def test_incorporation_filing_coop_from_colin(app, session): ("CC", "B.C. 
COMMUNITY CONTRIBUTION COMPANY LTD."), ], ) -def test_incorporation_filing_bc_company_from_colin( - app, session, legal_type, legal_name_suffix -): +def test_incorporation_filing_bc_company_from_colin(app, session, legal_type, legal_name_suffix): """Assert that an existing bc company(LTD, ULC, CCC) incorporation is loaded corrrectly.""" # setup with nested_session(session): @@ -327,9 +265,7 @@ def test_incorporation_filing_bc_company_from_colin( # Change the template to be LTD, ULC or CCC filing["filing"]["business"]["legalType"] = legal_type filing["filing"]["business"]["identifier"] = corp_num - filing["filing"]["incorporationApplication"]["nameRequest"][ - "legalType" - ] = legal_type + filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] = legal_type effective_date = datetime.utcnow() # Create the Filing object in the DB filing_rec = Filing(effective_date=effective_date, filing_json=filing) @@ -350,13 +286,8 @@ def test_incorporation_filing_bc_company_from_colin( # Assertions assert business.identifier == corp_num assert business.founding_date.replace(tzinfo=None) == effective_date - assert ( - business.entity_type - == filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] - ) + assert business.entity_type == filing["filing"]["incorporationApplication"]["nameRequest"]["legalType"] assert business.legal_name == f"{business.identifier[2:]} {legal_name_suffix}" - assert ( - len(business.offices.all()) == 2 - ) # One office is created in create_business method. + assert len(business.offices.all()) == 2 # One office is created in create_business method. assert len(business.share_classes.all()) == 2 assert len(business.entity_roles.all()) == 2 diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_on.py b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_on.py index 8467e42d70..78b9e387be 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_on.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_on.py @@ -15,8 +15,8 @@ import copy import random -from business_model import LegalEntity, Filing -from registry_schemas.example_data import PUT_BACK_ON, FILING_HEADER +from business_model import Filing, LegalEntity +from registry_schemas.example_data import FILING_HEADER, PUT_BACK_ON from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import put_back_on @@ -36,8 +36,6 @@ def test_worker_put_back_on(app, session): payment_id = str(random.SystemRandom().getrandbits(0x58)) filing = create_filing(payment_id, filing_json, business_id=business.id) - filing_msg = {"filing": {"id": filing.id}} - filing_meta = FilingMeta() filing = create_filing("123", filing_json) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_notation.py b/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_notation.py index 482a529b09..c0959b03a5 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_notation.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_notation.py @@ -18,8 +18,7 @@ from business_model import Filing from registry_schemas.example_data import REGISTRARS_NOTATION_FILING_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ 
-41,15 +40,6 @@ def test_worker_registrars_notation(app, session): # Check outcome final_filing = Filing.find_by_id(filing_id) - assert ( - filing["filing"]["registrarsNotation"]["fileNumber"] - == final_filing.court_order_file_number - ) - assert ( - filing["filing"]["registrarsNotation"]["effectOfOrder"] - == final_filing.court_order_effect_of_order - ) - assert ( - filing["filing"]["registrarsNotation"]["orderDetails"] - == final_filing.order_details - ) + assert filing["filing"]["registrarsNotation"]["fileNumber"] == final_filing.court_order_file_number + assert filing["filing"]["registrarsNotation"]["effectOfOrder"] == final_filing.court_order_effect_of_order + assert filing["filing"]["registrarsNotation"]["orderDetails"] == final_filing.order_details diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_order.py b/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_order.py index 8a7778107a..aaefd989de 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_order.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_registrars_order.py @@ -18,8 +18,7 @@ from business_model import Filing from registry_schemas.example_data import REGISTRARS_ORDER_FILING_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -41,15 +40,6 @@ def test_worker_registrars_order(app, session): # Check outcome final_filing = Filing.find_by_id(filing_id) - assert ( - filing["filing"]["registrarsOrder"]["fileNumber"] - == final_filing.court_order_file_number - ) - assert ( - filing["filing"]["registrarsOrder"]["effectOfOrder"] - == final_filing.court_order_effect_of_order - ) - assert ( - filing["filing"]["registrarsOrder"]["orderDetails"] - == final_filing.order_details - ) + assert filing["filing"]["registrarsOrder"]["fileNumber"] == final_filing.court_order_file_number + assert filing["filing"]["registrarsOrder"]["effectOfOrder"] == final_filing.court_order_effect_of_order + assert filing["filing"]["registrarsOrder"]["orderDetails"] == final_filing.order_details diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_registration.py b/queue_services/entity-filer/tests/unit/filing_processors/test_registration.py index b9cf67024f..845cf3eec2 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_registration.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_registration.py @@ -16,22 +16,19 @@ import copy from datetime import datetime from http import HTTPStatus -from unittest.mock import patch, call +from unittest.mock import call, patch import pytest -from business_model import LegalEntity, Filing, RegistrationBootstrap +from business_model import Filing, LegalEntity, RegistrationBootstrap # from legal_api.services import NaicsService from registry_schemas.example_data import FILING_HEADER, REGISTRATION from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import registration +from entity_filer.filing_processors.filing_components.legal_entity_info import NaicsService from tests.unit import create_filing, nested_session -from entity_filer.filing_processors.filing_components.legal_entity_info import ( - NaicsService, -) - now = "2023-01-08" GP_REGISTRATION = copy.deepcopy(FILING_HEADER) @@ -80,19 +77,14 @@ def 
test_registration_process(app, session, legal_type, filing): # test with patch.object(NaicsService, "find_by_code", return_value=naics_response): - business, filing_rec, filing_meta = registration.process( - None, filing, filing_rec, filing_meta - ) + business, filing_rec, filing_meta = registration.process(None, filing, filing_rec, filing_meta) # Assertions # Legal Entity assert business.identifier.startswith("FM") assert business.founding_date == effective_date assert business.start_date == datetime.fromisoformat(f"{now}T08:00:00+00:00") - assert ( - business.entity_type - == filing["filing"]["registration"]["nameRequest"]["legalType"] - ) + assert business.entity_type == filing["filing"]["registration"]["nameRequest"]["legalType"] assert business.tax_id == REGISTRATION["business"]["taxId"] assert business.state == LegalEntity.State.ACTIVE assert len(filing_rec.filing_entity_roles.all()) == 3 @@ -104,19 +96,13 @@ def test_registration_process(app, session, legal_type, filing): # NAICS assert business.naics_code == REGISTRATION["business"]["naics"]["naicsCode"] - assert ( - business.naics_description - == REGISTRATION["business"]["naics"]["naicsDescription"] - ) + assert business.naics_description == REGISTRATION["business"]["naics"]["naicsDescription"] # AlternateNames assert len(business.alternate_names.all()) > 0 alternate_name = business.alternate_names[0] assert alternate_name.identifier.startswith("FM") - assert ( - alternate_name.name - == filing["filing"]["registration"]["nameRequest"]["legalName"] - ) + assert alternate_name.name == filing["filing"]["registration"]["nameRequest"]["legalName"] @pytest.mark.parametrize( @@ -148,9 +134,7 @@ def test_sp_registration_process(app, session, legal_type, filing): # test with patch.object(NaicsService, "find_by_code", return_value=naics_response): - business, filing_rec, filing_meta = registration.process( - None, filing, filing_rec, filing_meta - ) + business, filing_rec, filing_meta = registration.process(None, filing, filing_rec, filing_meta) # Assertions # assert business.founding_date.replace(tzinfo=None) == effective_date @@ -159,15 +143,10 @@ def test_sp_registration_process(app, session, legal_type, filing): alternate_name = business.alternate_names.all()[0] # alternate_name = business.alternate_names - assert alternate_name.start_date == datetime.fromisoformat( - f"{now}T08:00:00+00:00" - ) + assert alternate_name.start_date == datetime.fromisoformat(f"{now}T08:00:00+00:00") assert alternate_name.identifier.startswith("FM") - assert ( - alternate_name.name - == filing["filing"]["registration"]["nameRequest"]["legalName"] - ) + assert alternate_name.name == filing["filing"]["registration"]["nameRequest"]["legalName"] # TODO I don't think it makes sens to be changing or setting # a natural person's NAICS codes. Maybe this is an Alias/DBA thing # assert business.naics_code == REGISTRATION['business']['naics']['naicsCode'] diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_special_resolution.py b/queue_services/entity-filer/tests/unit/filing_processors/test_special_resolution.py index 9d1b173803..802ab13d0b 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_special_resolution.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_special_resolution.py @@ -13,16 +13,14 @@ # limitations under the License. 
"""The Unit Tests for the Special Resolution filing.""" import copy -import pytest -from registry_schemas.example_data import ( - SPECIAL_RESOLUTION as special_resolution_json, - FILING_HEADER, -) +import pytest +from business_model import Resolution +from registry_schemas.example_data import FILING_HEADER +from registry_schemas.example_data import SPECIAL_RESOLUTION as special_resolution_json from entity_filer.filing_processors import special_resolution from tests.unit import create_business, create_filing -from business_model import Resolution @pytest.mark.parametrize( @@ -31,9 +29,7 @@ ("CP", "CP1234567", "specialResolution"), ], ) -def test_special_resolution( - app, session, legal_type, identifier, special_resolution_type -): +def test_special_resolution(app, session, legal_type, identifier, special_resolution_type): """Assert that the resolution is processed.""" # setup filing_json = copy.deepcopy(FILING_HEADER) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_transition.py b/queue_services/entity-filer/tests/unit/filing_processors/test_transition.py index e423e854a6..a3e8ad4221 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_transition.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_transition.py @@ -43,15 +43,9 @@ def test_transition_filing_process(app, session): # Assertions assert business.restriction_ind is False - assert len(business.share_classes.all()) == len( - filing["filing"]["transition"]["shareStructure"]["shareClasses"] - ) + assert len(business.share_classes.all()) == len(filing["filing"]["transition"]["shareStructure"]["shareClasses"]) assert len(business.offices.all()) == len(filing["filing"]["transition"]["offices"]) - assert len(business.aliases.all()) == len( - filing["filing"]["transition"]["nameTranslations"] - ) - assert len(business.resolutions.all()) == len( - filing["filing"]["transition"]["shareStructure"]["resolutionDates"] - ) + assert len(business.aliases.all()) == len(filing["filing"]["transition"]["nameTranslations"]) + assert len(business.resolutions.all()) == len(filing["filing"]["transition"]["shareStructure"]["resolutionDates"]) assert len(business.entity_roles.all()) == 1 assert len(filing_rec.filing_entity_roles.all()) == 1 diff --git a/queue_services/entity-filer/tests/unit/worker/test_agm_extension.py b/queue_services/entity-filer/tests/unit/worker/test_agm_extension.py index a4d0b949fe..df2b754a3e 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_agm_extension.py +++ b/queue_services/entity-filer/tests/unit/worker/test_agm_extension.py @@ -19,17 +19,11 @@ from business_model import Filing from registry_schemas.example_data import AGM_EXTENSION, FILING_HEADER -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing -@pytest.mark.parametrize( - "test_name", - [ - ("general"), ("first_agm_year"), ("more_extension"), ("final_extension") - ] -) +@pytest.mark.parametrize("test_name", [("general"), ("first_agm_year"), ("more_extension"), ("final_extension")]) def test_worker_agm_extension(app, session, mocker, test_name): """Assert that the agm extension object is correctly populated to model objects.""" identifier = "BC1234567" diff --git a/queue_services/entity-filer/tests/unit/worker/test_amalgamation_application.py 
b/queue_services/entity-filer/tests/unit/worker/test_amalgamation_application.py index 300fdf50bb..6d5bf453ff 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_amalgamation_application.py +++ b/queue_services/entity-filer/tests/unit/worker/test_amalgamation_application.py @@ -23,8 +23,7 @@ from registry_schemas.example_data import AMALGAMATION_APPLICATION from entity_filer.filing_processors.filing_components import legal_entity_info -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_entity, create_filing @@ -40,18 +39,17 @@ def test_amalgamation_application_process(app, session): amalgamating_business_2_id = create_entity(amalgamating_identifier_2, "BC", "amalgamating business 2").id filing = {"filing": {}} - filing["filing"]["header"] = {"name": filing_type, "date": "2019-04-08", - "certifiedBy": "full name", "email": "no_one@never.get", "filingId": 1} + filing["filing"]["header"] = { + "name": filing_type, + "date": "2019-04-08", + "certifiedBy": "full name", + "email": "no_one@never.get", + "filingId": 1, + } filing["filing"][filing_type] = copy.deepcopy(AMALGAMATION_APPLICATION) filing["filing"][filing_type]["amalgamatingBusinesses"] = [ - { - "role": "amalgamating", - "identifier": amalgamating_identifier_1 - }, - { - "role": "amalgamating", - "identifier": amalgamating_identifier_2 - } + {"role": "amalgamating", "identifier": amalgamating_identifier_1}, + {"role": "amalgamating", "identifier": amalgamating_identifier_2}, ] filing["filing"][filing_type]["nameRequest"]["nrNumber"] = nr_identifier diff --git a/queue_services/entity-filer/tests/unit/worker/test_change_of_registration.py b/queue_services/entity-filer/tests/unit/worker/test_change_of_registration.py index f94069e775..8f372f22e9 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_change_of_registration.py +++ b/queue_services/entity-filer/tests/unit/worker/test_change_of_registration.py @@ -14,56 +14,43 @@ """The Unit Tests for the Change of Registration filing.""" import copy import random - -# from datetime import datetime -from business_model.utils.datetime import datetime from typing import Final from unittest.mock import patch import pytest -from business_model import Address, AlternateName, LegalEntity, Filing, EntityRole +from business_model import Address, AlternateName, EntityRole, Filing, LegalEntity -# from legal_api.services import NaicsService -from entity_filer.filing_processors.filing_components.legal_entity_info import ( - NaicsService, -) -from registry_schemas.example_data import ( - CHANGE_OF_REGISTRATION_TEMPLATE, - COURT_ORDER, - REGISTRATION, -) +# from datetime import datetime +from business_model.utils.datetime import datetime +from registry_schemas.example_data import CHANGE_OF_REGISTRATION_TEMPLATE, COURT_ORDER, REGISTRATION -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +# from legal_api.services import NaicsService +from entity_filer.filing_processors.filing_components.legal_entity_info import NaicsService +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import ( create_entity, - create_office, - create_office_address, create_entity_person, create_entity_role, create_filing, + create_office, + create_office_address, ) - CONTACT_POINT = {"email": "no_one@never.get", "phone": "123-456-7890"} 
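The worker tests touched above all follow the same shape: create a filing row, wrap its id in a `FilingMessage`, hand that message to `process_filing`, then reload the model and assert on the outcome. The sketch below mirrors that flow with in-memory stand-ins; `ToyFiling`, `FILINGS`, and the `FilingMessage`/`process_filing` pair here are toy definitions for illustration, not the real `entity_filer.resources.worker` API.

from dataclasses import dataclass, field

@dataclass
class ToyFiling:
    id: int
    filing_json: dict
    status: str = "PENDING"
    meta_data: dict = field(default_factory=dict)

# In-memory stand-in for the filings table.
FILINGS: dict = {}

@dataclass
class FilingMessage:
    filing_identifier: int

def process_filing(msg: FilingMessage) -> None:
    """Toy worker: mark the referenced filing completed and record its type."""
    filing = FILINGS[msg.filing_identifier]
    filing_type = filing.filing_json["filing"]["header"]["name"]
    filing.meta_data[filing_type] = {}
    filing.status = "COMPLETED"

def test_worker_processes_filing():
    filing = ToyFiling(id=1, filing_json={"filing": {"header": {"name": "putBackOn"}}})
    FILINGS[filing.id] = filing

    process_filing(FilingMessage(filing_identifier=filing.id))

    final_filing = FILINGS[filing.id]
    assert final_filing.status == "COMPLETED"
    assert "putBackOn" in final_filing.meta_data
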
GP_CHANGE_OF_REGISTRATION = copy.deepcopy(CHANGE_OF_REGISTRATION_TEMPLATE) -GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"].append( - REGISTRATION["parties"][1] -) -del GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]['officer']['id'] -del GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][1]['officer']['id'] +GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"].append(REGISTRATION["parties"][1]) +del GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]["officer"]["id"] +del GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][1]["officer"]["id"] SP_CHANGE_OF_REGISTRATION = copy.deepcopy(CHANGE_OF_REGISTRATION_TEMPLATE) SP_CHANGE_OF_REGISTRATION["filing"]["business"]["legalType"] = "SP" -SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["nameRequest"][ - "legalType" -] = "SP" +SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["nameRequest"]["legalType"] = "SP" SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]["roles"] = [ {"roleType": "Completing Party", "appointmentDate": "2022-01-01"}, {"roleType": "Proprietor", "appointmentDate": "2022-01-01"}, ] -del SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]['officer']['id'] +del SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]["officer"]["id"] naics_response = { "code": REGISTRATION["business"]["naics"]["naicsCode"], @@ -91,24 +78,22 @@ def test_change_of_registration_legal_name_sp( """Assert the worker process calls the legal name change correctly.""" identifier = "FM1234567" - + filing = copy.deepcopy(filing_template) if test_name == "name_change": - filing["filing"]["changeOfRegistration"]["nameRequest"][ - "legalName" - ] = new_legal_name + filing["filing"]["changeOfRegistration"]["nameRequest"]["legalName"] = new_legal_name else: del filing["filing"]["changeOfRegistration"]["nameRequest"] payment_id = str(random.SystemRandom().getrandbits(0x58)) - proprietor_identifier = 'P1234567' + proprietor_identifier = "P1234567" proprietor = create_entity(proprietor_identifier, "person", "my self old") - filing["filing"]["changeOfRegistration"]["parties"][0]['officer']['id'] = proprietor.id + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["id"] = proprietor.id filing["filing"]["business"]["identifier"] = identifier filing = create_filing(payment_id, filing) - + alternate_name = AlternateName( identifier=identifier, name_type=AlternateName.NameType.OPERATING, @@ -121,11 +106,11 @@ def test_change_of_registration_legal_name_sp( proprietor.alternate_names.append(alternate_name) proprietor.save() proprietor_id = proprietor.id - + filing_id = filing.id filing.legal_entity_id = proprietor_id filing.save() - + filing_msg = FilingMessage(filing_identifier=filing_id) # mock out the email sender and event publishing @@ -176,12 +161,10 @@ def test_change_of_registration_legal_name_gp( """Assert the worker process calls the legal name change correctly.""" identifier = "FM1234567" - + filing = copy.deepcopy(filing_template) if test_name == "name_change": - filing["filing"]["changeOfRegistration"]["nameRequest"][ - "legalName" - ] = new_legal_name + filing["filing"]["changeOfRegistration"]["nameRequest"]["legalName"] = new_legal_name else: del filing["filing"]["changeOfRegistration"]["nameRequest"] @@ -190,7 +173,7 @@ def test_change_of_registration_legal_name_gp( filing = create_filing(payment_id, filing) business = 
create_entity(identifier, legal_type, legal_name) - + alternate_name = AlternateName( identifier=identifier, name_type=AlternateName.NameType.OPERATING, @@ -206,7 +189,7 @@ def test_change_of_registration_legal_name_gp( filing_id = filing.id filing.legal_entity_id = business_id filing.save() - + filing_msg = FilingMessage(filing_identifier=filing_id) # mock out the email sender and event publishing @@ -265,12 +248,12 @@ def test_change_of_registration_business_address( del filing["filing"]["changeOfRegistration"]["nameRequest"] del filing["filing"]["changeOfRegistration"]["parties"] - filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"][ - "deliveryAddress" - ]["id"] = business_delivery_address_id - filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"][ - "mailingAddress" - ]["id"] = business_mailing_address_id + filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"]["deliveryAddress"][ + "id" + ] = business_delivery_address_id + filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"]["mailingAddress"][ + "id" + ] = business_mailing_address_id payment_id = str(random.SystemRandom().getrandbits(0x58)) @@ -297,17 +280,13 @@ def test_change_of_registration_business_address( for key in ["streetAddress", "postalCode", "addressCity", "addressRegion"]: assert ( changed_delivery_address.json[key] - == filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"][ - "deliveryAddress" - ][key] + == filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"]["deliveryAddress"][key] ) changed_mailing_address = Address.find_by_id(business_mailing_address_id) for key in ["streetAddress", "postalCode", "addressCity", "addressRegion"]: assert ( changed_mailing_address.json[key] - == filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"][ - "mailingAddress" - ][key] + == filing["filing"]["changeOfRegistration"]["offices"]["businessOffice"]["mailingAddress"][key] ) @@ -318,9 +297,7 @@ def test_change_of_registration_business_address( ("sp_court_order", "SP", SP_CHANGE_OF_REGISTRATION), ], ) -def test_worker_change_of_registration_court_order( - app, session, mocker, test_name, legal_type, filing_template -): +def test_worker_change_of_registration_court_order(app, session, mocker, test_name, legal_type, filing_template): """Assert the worker process the court order correctly.""" identifier = "FM1234567" business = create_entity(identifier, legal_type, "Test Entity") @@ -334,9 +311,7 @@ def test_worker_change_of_registration_court_order( filing["filing"]["changeOfRegistration"]["contactPoint"] = CONTACT_POINT filing["filing"]["changeOfRegistration"]["courtOrder"] = COURT_ORDER - filing["filing"]["changeOfRegistration"]["courtOrder"][ - "effectOfOrder" - ] = effect_of_order + filing["filing"]["changeOfRegistration"]["courtOrder"]["effectOfOrder"] = effect_of_order del filing["filing"]["changeOfRegistration"]["nameRequest"] del filing["filing"]["changeOfRegistration"]["parties"] @@ -371,9 +346,7 @@ def test_worker_proprietor_name_and_address_change(app, session, mocker): business = create_entity(identifier, "SP", "Test Entity") business_id = business.id - party = create_entity_person( - SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0] - ) + party = create_entity_person(SP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]) party_id = party.id create_entity_role(business, party, ["proprietor"], datetime.utcnow()) @@ -381,18 +354,10 @@ def 
test_worker_proprietor_name_and_address_change(app, session, mocker): filing = copy.deepcopy(SP_CHANGE_OF_REGISTRATION) filing["filing"]["changeOfRegistration"]["contactPoint"] = CONTACT_POINT filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["id"] = party_id - filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "firstName" - ] = "New Name" - filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "middleInitial" - ] = "New Name" - filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] = "New Name" - filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] = "New Name" + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["firstName"] = "New Name" + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["middleInitial"] = "New Name" + filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"]["streetAddress"] = "New Name" + filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"]["streetAddress"] = "New Name" del filing["filing"]["changeOfRegistration"]["nameRequest"] @@ -416,23 +381,14 @@ def test_worker_proprietor_name_and_address_change(app, session, mocker): # Check outcome business = LegalEntity.find_by_internal_id(business_id) party = business.entity_roles.all()[0].related_entity - assert ( - party.first_name - == filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "firstName" - ].upper() - ) + assert party.first_name == filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["firstName"].upper() assert ( party.entity_delivery_address.street - == filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] + == filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"]["streetAddress"] ) assert ( party.entity_mailing_address.street - == filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] + == filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"]["streetAddress"] ) @@ -450,13 +406,9 @@ def test_worker_partner_name_and_address_change(app, session, mocker, test_name) business = create_entity(identifier, "GP", "Test Entity") business_id = business.id - party1 = create_entity_person( - GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0] - ) + party1 = create_entity_person(GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][0]) party_id_1 = party1.id - party2 = create_entity_person( - GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][1] - ) + party2 = create_entity_person(GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][1]) party_id_2 = party2.id create_entity_role(business, party1, ["partner"], datetime.utcnow()) @@ -466,38 +418,20 @@ def test_worker_partner_name_and_address_change(app, session, mocker, test_name) filing["filing"]["changeOfRegistration"]["contactPoint"] = CONTACT_POINT if test_name == "gp_add_partner": - filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "id" - ] = party_id_1 - filing["filing"]["changeOfRegistration"]["parties"][1]["officer"][ - "id" - ] = party_id_2 - new_party_json = GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"][ - "parties" - ][1] + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["id"] = party_id_1 + filing["filing"]["changeOfRegistration"]["parties"][1]["officer"]["id"] = party_id_2 + new_party_json = 
GP_CHANGE_OF_REGISTRATION["filing"]["changeOfRegistration"]["parties"][1] del new_party_json["officer"]["id"] new_party_json["officer"]["firstName"] = "New Name" filing["filing"]["changeOfRegistration"]["parties"].append(new_party_json) if test_name == "gp_edit_partner_name_and_address": - filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "id" - ] = party_id_1 - filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "firstName" - ] = "New Name a" - filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "middleInitial" - ] = "New Name a" - filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] = "New Name" - filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] = "New Name" - filing["filing"]["changeOfRegistration"]["parties"][1]["officer"][ - "id" - ] = party_id_2 + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["id"] = party_id_1 + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["firstName"] = "New Name a" + filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["middleInitial"] = "New Name a" + filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"]["streetAddress"] = "New Name" + filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"]["streetAddress"] = "New Name" + filing["filing"]["changeOfRegistration"]["parties"][1]["officer"]["id"] = party_id_2 if test_name == "gp_delete_partner": del filing["filing"]["changeOfRegistration"]["parties"][1] @@ -534,22 +468,15 @@ def test_worker_partner_name_and_address_change(app, session, mocker, test_name) if test_name == "gp_edit_partner_name_and_address": party = business.entity_roles.all()[0].related_entity assert ( - party.first_name - == filing["filing"]["changeOfRegistration"]["parties"][0]["officer"][ - "firstName" - ].upper() + party.first_name == filing["filing"]["changeOfRegistration"]["parties"][0]["officer"]["firstName"].upper() ) assert ( party.entity_delivery_address.street - == filing["filing"]["changeOfRegistration"]["parties"][0][ - "deliveryAddress" - ]["streetAddress"] + == filing["filing"]["changeOfRegistration"]["parties"][0]["deliveryAddress"]["streetAddress"] ) assert ( party.entity_mailing_address.street - == filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] + == filing["filing"]["changeOfRegistration"]["parties"][0]["mailingAddress"]["streetAddress"] ) assert business.entity_roles.all()[0].cessation_date is None assert business.entity_roles.all()[1].cessation_date is None diff --git a/queue_services/entity-filer/tests/unit/worker/test_consent_continuation_out.py b/queue_services/entity-filer/tests/unit/worker/test_consent_continuation_out.py index 2cd6b89b27..5bf2ec2d97 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_consent_continuation_out.py +++ b/queue_services/entity-filer/tests/unit/worker/test_consent_continuation_out.py @@ -21,8 +21,7 @@ from business_model.utils.legislation_datetime import LegislationDatetime from registry_schemas.example_data import CONSENT_CONTINUATION_OUT, FILING_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -35,16 +34,10 @@ # DST_TO_DST is not possible. 
Example: 2023-11-06 (starting day of DST) + 6 months = 2024-05-06 (in STD) ], ) -def test_worker_consent_continuation_out( - app, session, mocker, test_name, effective_date, expiry_date -): +def test_worker_consent_continuation_out(app, session, mocker, test_name, effective_date, expiry_date): """Assert that the consent continuation out object is correctly populated to model objects.""" - effective_date = LegislationDatetime.as_legislation_timezone( - datetime.fromisoformat(effective_date) - ) - expiry_date = LegislationDatetime.as_legislation_timezone( - datetime.fromisoformat(expiry_date) - ) + effective_date = LegislationDatetime.as_legislation_timezone(datetime.fromisoformat(effective_date)) + expiry_date = LegislationDatetime.as_legislation_timezone(datetime.fromisoformat(expiry_date)) identifier = "BC1234567" business = create_business(identifier, legal_type="BC") @@ -86,42 +79,27 @@ def test_worker_consent_continuation_out( filing_json["filing"]["consentContinuationOut"]["courtOrder"]["effectOfOrder"] == final_filing.court_order_effect_of_order ) - assert ( - filing_json["filing"]["consentContinuationOut"]["details"] - == final_filing.order_details - ) + assert filing_json["filing"]["consentContinuationOut"]["details"] == final_filing.order_details expiry_date_utc = LegislationDatetime.as_utc_timezone(expiry_date) cco = ConsentContinuationOut.get_active_cco(business.id, expiry_date_utc) assert cco assert ( - cco[0].foreign_jurisdiction - == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"][ - "country" - ] + cco[0].foreign_jurisdiction == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"]["country"] ) assert ( cco[0].foreign_jurisdiction_region - == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"][ - "region" - ] + == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"]["region"] ) assert cco[0].expiry_date == expiry_date_utc assert ( final_filing.meta_data["consentContinuationOut"]["country"] - == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"][ - "country" - ] + == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"]["country"] ) assert ( final_filing.meta_data["consentContinuationOut"]["region"] - == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"][ - "region" - ] - ) - assert ( - final_filing.meta_data["consentContinuationOut"]["expiry"] - == expiry_date_utc.isoformat() + == filing_json["filing"]["consentContinuationOut"]["foreignJurisdiction"]["region"] ) + assert final_filing.meta_data["consentContinuationOut"]["expiry"] == expiry_date_utc.isoformat() diff --git a/queue_services/entity-filer/tests/unit/worker/test_conversion.py b/queue_services/entity-filer/tests/unit/worker/test_conversion.py index 35a370f630..6f7903b5ef 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_conversion.py +++ b/queue_services/entity-filer/tests/unit/worker/test_conversion.py @@ -14,32 +14,17 @@ """The Unit Tests for the Conversion filing.""" import copy import random - -import pytest from datetime import datetime from unittest.mock import patch -# from legal_api.services import NaicsService -from entity_filer.filing_processors.filing_components.legal_entity_info import ( - NaicsService, -) -from business_model import Address, LegalEntity, Filing, EntityRole -from registry_schemas.example_data import ( - CONVERSION_FILING_TEMPLATE, - FIRMS_CONVERSION, - COURT_ORDER, - REGISTRATION, -) - -from entity_filer.resources.worker import process_filing 
-from entity_filer.resources.worker import FilingMessage -from tests.unit import ( - create_entity, - create_filing, - create_entity_person, - create_entity_role, -) +import pytest +from business_model import Address, EntityRole, Filing, LegalEntity +from registry_schemas.example_data import CONVERSION_FILING_TEMPLATE, COURT_ORDER, FIRMS_CONVERSION, REGISTRATION +# from legal_api.services import NaicsService +from entity_filer.filing_processors.filing_components.legal_entity_info import NaicsService +from entity_filer.resources.worker import FilingMessage, process_filing +from tests.unit import create_entity, create_entity_person, create_entity_role, create_filing CONTACT_POINT = {"email": "no_one@never.get", "phone": "123-456-7890"} @@ -129,16 +114,12 @@ def test_conversion( assert business.offices.first().office_type == "businessOffice" assert ( - business.naics_description - == filing_template["filing"]["conversion"]["business"]["naics"][ - "naicsDescription" - ] + business.naics_description == filing_template["filing"]["conversion"]["business"]["naics"]["naicsDescription"] ) def test_worker_proprietor_new_address(app, session, mocker): """Assert the worker process the party new address correctly.""" - identifier = "FM1234567" party = create_entity_person(SP_CONVERSION["filing"]["conversion"]["parties"][0]) party_id = party.id @@ -153,12 +134,8 @@ def test_worker_proprietor_new_address(app, session, mocker): filing = copy.deepcopy(SP_CONVERSION) filing["filing"]["conversion"]["contactPoint"] = CONTACT_POINT filing["filing"]["conversion"]["parties"][0]["officer"]["id"] = party_id - filing["filing"]["conversion"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] = "New Name" - filing["filing"]["conversion"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] = "New Name" + filing["filing"]["conversion"]["parties"][0]["mailingAddress"]["streetAddress"] = "New Name" + filing["filing"]["conversion"]["parties"][0]["deliveryAddress"]["streetAddress"] = "New Name" del filing["filing"]["conversion"]["nameRequest"] @@ -184,13 +161,9 @@ def test_worker_proprietor_new_address(app, session, mocker): assert party.entity_roles.all()[0].role_type == EntityRole.RoleTypes.proprietor assert ( party.entity_delivery_address.street - == filing["filing"]["conversion"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] + == filing["filing"]["conversion"]["parties"][0]["deliveryAddress"]["streetAddress"] ) assert ( party.entity_mailing_address.street - == filing["filing"]["conversion"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] + == filing["filing"]["conversion"]["parties"][0]["mailingAddress"]["streetAddress"] ) diff --git a/queue_services/entity-filer/tests/unit/worker/test_correction_bcia.py b/queue_services/entity-filer/tests/unit/worker/test_correction_bcia.py index 520d87090d..f453394211 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_correction_bcia.py +++ b/queue_services/entity-filer/tests/unit/worker/test_correction_bcia.py @@ -17,34 +17,27 @@ from datetime import datetime from typing import Final from unittest.mock import patch -from dateutil.parser import parse import pytest -from business_model import Address, Alias, LegalEntity, Filing, EntityRole +from business_model import Address, Alias, EntityRole, Filing, LegalEntity +from dateutil.parser import parse +from registry_schemas.example_data import COURT_ORDER, REGISTRATION +from sql_versioning import versioned_session # from legal_api.services import NaicsService -from 
entity_filer.filing_processors.filing_components.legal_entity_info import ( - NaicsService, -) -from registry_schemas.example_data import ( - COURT_ORDER, - REGISTRATION, -) - -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.filing_processors.filing_components.legal_entity_info import NaicsService +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import ( create_alias, create_entity, + create_entity_person, + create_entity_role, create_filing, create_office, create_office_address, - create_entity_person, - create_entity_role, factory_completed_filing, + nested_session, ) -from tests.unit import nested_session -from sql_versioning import versioned_session CONTACT_POINT = {"email": "no_one@never.get", "phone": "123-456-7890"} @@ -138,9 +131,7 @@ "postalCode": "H0H0H0", "addressRegion": "BC", }, - "roles": [ - {"roleType": "Director", "appointmentDate": "2022-01-01"} - ], + "roles": [{"roleType": "Director", "appointmentDate": "2022-01-01"}], }, { "officer": { @@ -256,9 +247,7 @@ def test_correction_name_change( filing = copy.deepcopy(filing_template) - corrected_filing_id = factory_completed_filing( - business, BC_CORRECTION_APPLICATION - ).id + corrected_filing_id = factory_completed_filing(business, BC_CORRECTION_APPLICATION).id filing["filing"]["correction"]["correctedFilingId"] = corrected_filing_id del filing["filing"]["correction"]["parties"][0]["officer"]["id"] del filing["filing"]["correction"]["parties"][1]["officer"]["id"] @@ -331,9 +320,7 @@ def test_correction_name_translation(app, session, mocker, test_name, legal_type alias = create_alias(business, "ABCD") filing["filing"]["correction"]["nameTranslations"][0]["id"] = str(alias.id) - corrected_filing_id = factory_completed_filing( - business, BC_CORRECTION_APPLICATION - ).id + corrected_filing_id = factory_completed_filing(business, BC_CORRECTION_APPLICATION).id filing["filing"]["correction"]["correctedFilingId"] = corrected_filing_id filing["filing"]["correction"]["correctedFilingType"] = "incorporationApplication" @@ -385,9 +372,7 @@ def test_correction_name_translation(app, session, mocker, test_name, legal_type ("ulc_address_change", "ULC", "Test Firm", BC_CORRECTION), ], ) -def test_correction_business_address( - app, session, mocker, test_name, legal_type, legal_name, filing_template -): +def test_correction_business_address(app, session, mocker, test_name, legal_type, legal_name, filing_template): """Assert the worker process calls the business address change correctly.""" identifier = "BC1234567" business = create_entity(identifier, legal_type, legal_name) @@ -404,21 +389,19 @@ def test_correction_business_address( filing = copy.deepcopy(filing_template) - corrected_filing_id = factory_completed_filing( - business, BC_CORRECTION_APPLICATION - ).id + corrected_filing_id = factory_completed_filing(business, BC_CORRECTION_APPLICATION).id filing["filing"]["correction"]["correctedFilingId"] = corrected_filing_id del filing["filing"]["correction"]["nameRequest"] del filing["filing"]["correction"]["parties"][0]["officer"]["id"] del filing["filing"]["correction"]["parties"][1]["officer"]["id"] - filing["filing"]["correction"]["offices"]["registeredOffice"][ - "deliveryAddress" - ] = Address.find_by_id(office_delivery_address_id).json - filing["filing"]["correction"]["offices"]["registeredOffice"][ - "mailingAddress" - ] = Address.find_by_id(office_mailing_address_id).json + 
filing["filing"]["correction"]["offices"]["registeredOffice"]["deliveryAddress"] = Address.find_by_id( + office_delivery_address_id + ).json + filing["filing"]["correction"]["offices"]["registeredOffice"]["mailingAddress"] = Address.find_by_id( + office_mailing_address_id + ).json payment_id = str(random.SystemRandom().getrandbits(0x58)) @@ -445,17 +428,13 @@ def test_correction_business_address( for key in ["streetAddress", "postalCode", "addressCity", "addressRegion"]: assert ( changed_delivery_address.json[key] - == filing["filing"]["correction"]["offices"]["registeredOffice"][ - "deliveryAddress" - ][key] + == filing["filing"]["correction"]["offices"]["registeredOffice"]["deliveryAddress"][key] ) changed_mailing_address = Address.find_by_id(office_mailing_address_id) for key in ["streetAddress", "postalCode", "addressCity", "addressRegion"]: assert ( changed_mailing_address.json[key] - == filing["filing"]["correction"]["offices"]["registeredOffice"][ - "mailingAddress" - ][key] + == filing["filing"]["correction"]["offices"]["registeredOffice"]["mailingAddress"][key] ) @@ -468,9 +447,7 @@ def test_correction_business_address( ("ulc_court_order", "ULC", BC_CORRECTION), ], ) -def test_worker_correction_court_order( - app, session, mocker, test_name, legal_type, filing_template -): +def test_worker_correction_court_order(app, session, mocker, test_name, legal_type, filing_template): """Assert the worker process process the court order correctly.""" identifier = "BC1234567" business = create_entity(identifier, legal_type, "Test Entity") @@ -537,9 +514,7 @@ def test_worker_correction_court_order( ("ulc_delete_director", "ULC"), ], ) -def test_worker_director_name_and_address_change( - app, session, mocker, test_name, legal_type -): +def test_worker_director_name_and_address_change(app, session, mocker, test_name, legal_type): """Assert the worker processes the court order correctly.""" identifier = "BC1234567" versioned_session(session) @@ -548,13 +523,9 @@ def test_worker_director_name_and_address_change( business = create_entity(identifier, legal_type, "Test Entity") business_id = business.id - party1 = create_entity_person( - BC_CORRECTION["filing"]["correction"]["parties"][0] - ) + party1 = create_entity_person(BC_CORRECTION["filing"]["correction"]["parties"][0]) party_id_1 = party1.id - party2 = create_entity_person( - BC_CORRECTION["filing"]["correction"]["parties"][1] - ) + party2 = create_entity_person(BC_CORRECTION["filing"]["correction"]["parties"][1]) party_id_2 = party2.id create_entity_role(business, party1, ["director"], datetime.utcnow()) @@ -573,27 +544,17 @@ def test_worker_director_name_and_address_change( if "add_director" in test_name: # filing['filing']['correction']['parties'][0]['officer']['id'] = party_id_1 # filing['filing']['correction']['parties'][1]['officer']['id'] = party_id_2 - new_party_json = copy.deepcopy( - BC_CORRECTION["filing"]["correction"]["parties"][1] - ) + new_party_json = copy.deepcopy(BC_CORRECTION["filing"]["correction"]["parties"][1]) del new_party_json["officer"]["id"] new_party_json["officer"]["firstName"] = "New Name" filing["filing"]["correction"]["parties"].append(new_party_json) if "edit_director_name_and_address" in test_name: # filing['filing']['correction']['parties'][0]['officer']['id'] = party_id_1 - filing["filing"]["correction"]["parties"][0]["officer"][ - "firstName" - ] = "New Name a" - filing["filing"]["correction"]["parties"][0]["officer"][ - "middleInitial" - ] = "New Name a" - 
filing["filing"]["correction"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] = "New Name" - filing["filing"]["correction"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] = "New Name" + filing["filing"]["correction"]["parties"][0]["officer"]["firstName"] = "New Name a" + filing["filing"]["correction"]["parties"][0]["officer"]["middleInitial"] = "New Name a" + filing["filing"]["correction"]["parties"][0]["mailingAddress"]["streetAddress"] = "New Name" + filing["filing"]["correction"]["parties"][0]["deliveryAddress"]["streetAddress"] = "New Name" # filing['filing']['correction']['parties'][1]['officer']['id'] = party_id_2 if "delete_director" in test_name: @@ -637,23 +598,14 @@ def test_worker_director_name_and_address_change( party_role = candidate break - assert ( - party.first_name - == filing["filing"]["correction"]["parties"][0]["officer"][ - "firstName" - ].upper() - ) + assert party.first_name == filing["filing"]["correction"]["parties"][0]["officer"]["firstName"].upper() assert ( party_role.delivery_address.street - == filing["filing"]["correction"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] + == filing["filing"]["correction"]["parties"][0]["deliveryAddress"]["streetAddress"] ) assert ( party_role.mailing_address.street - == filing["filing"]["correction"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] + == filing["filing"]["correction"]["parties"][0]["mailingAddress"]["streetAddress"] ) assert business.entity_roles.all()[0].cessation_date is None assert business.entity_roles.all()[1].cessation_date is None @@ -700,12 +652,8 @@ def test_worker_resolution_dates_change(app, session, mocker, test_name, legal_t business = create_entity(identifier, legal_type, "Test Entity") business_id = business.id - resolution_dates_json1 = BC_CORRECTION["filing"]["correction"]["shareStructure"][ - "resolutionDates" - ][0] - resolution_dates_json2 = BC_CORRECTION["filing"]["correction"]["shareStructure"][ - "resolutionDates" - ][1] + resolution_dates_json1 = BC_CORRECTION["filing"]["correction"]["shareStructure"]["resolutionDates"][0] + resolution_dates_json2 = BC_CORRECTION["filing"]["correction"]["shareStructure"]["resolutionDates"][1] filing = copy.deepcopy(BC_CORRECTION) @@ -716,9 +664,7 @@ def test_worker_resolution_dates_change(app, session, mocker, test_name, legal_t if "add_resolution_dates" in test_name: new_resolution_dates = "2022-09-01" - filing["filing"]["correction"]["shareStructure"]["resolutionDates"].append( - new_resolution_dates - ) + filing["filing"]["correction"]["shareStructure"]["resolutionDates"].append(new_resolution_dates) if "delete_resolution_dates" in test_name: del filing["filing"]["correction"]["shareStructure"]["resolutionDates"][0] @@ -730,19 +676,12 @@ def test_worker_resolution_dates_change(app, session, mocker, test_name, legal_t payment_id = str(random.SystemRandom().getrandbits(0x58)) filing_id = (create_filing(payment_id, filing, business_id=business.id)).id - if ( - "update_existing_resolution_dates" in test_name - or "update_with_new_resolution_dates" in test_name - ): + if "update_existing_resolution_dates" in test_name or "update_with_new_resolution_dates" in test_name: updated_resolution_dates = "2022-09-01" if "update_existing_resolution_dates" in test_name: - filing["filing"]["correction"]["shareStructure"]["resolutionDates"][ - 1 - ] = updated_resolution_dates + filing["filing"]["correction"]["shareStructure"]["resolutionDates"][1] = updated_resolution_dates else: - 
filing["filing"]["correction"]["shareStructure"]["resolutionDates"] = [ - updated_resolution_dates - ] + filing["filing"]["correction"]["shareStructure"]["resolutionDates"] = [updated_resolution_dates] payment_id = str(random.SystemRandom().getrandbits(0x58)) filing_id = (create_filing(payment_id, filing, business_id=business.id)).id @@ -811,20 +750,14 @@ def test_worker_resolution_dates_change(app, session, mocker, test_name, legal_t ("ulc_delete_share_class", "ULC"), ], ) -def test_worker_share_class_and_series_change( - app, session, mocker, test_name, legal_type -): +def test_worker_share_class_and_series_change(app, session, mocker, test_name, legal_type): """Assert the worker processes the court order correctly.""" identifier = "BC1234567" business = create_entity(identifier, legal_type, "Test Entity") business_id = business.id - share_class_json1 = BC_CORRECTION["filing"]["correction"]["shareStructure"][ - "shareClasses" - ][0] - share_class_json2 = BC_CORRECTION["filing"]["correction"]["shareStructure"][ - "shareClasses" - ][1] + share_class_json1 = BC_CORRECTION["filing"]["correction"]["shareStructure"]["shareClasses"][0] + share_class_json2 = BC_CORRECTION["filing"]["correction"]["shareStructure"]["shareClasses"][1] filing = copy.deepcopy(BC_CORRECTION) @@ -834,14 +767,10 @@ def test_worker_share_class_and_series_change( filing["filing"]["correction"]["contactPoint"] = CONTACT_POINT if "add_share_class" in test_name: - new_share_class_json = copy.deepcopy( - BC_CORRECTION["filing"]["correction"]["shareStructure"]["shareClasses"][1] - ) + new_share_class_json = copy.deepcopy(BC_CORRECTION["filing"]["correction"]["shareStructure"]["shareClasses"][1]) del new_share_class_json["id"] new_share_class_json["name"] = "New Share Class" - filing["filing"]["correction"]["shareStructure"]["shareClasses"].append( - new_share_class_json - ) + filing["filing"]["correction"]["shareStructure"]["shareClasses"].append(new_share_class_json) if "delete_share_class" in test_name: del filing["filing"]["correction"]["shareStructure"]["shareClasses"][0] @@ -853,10 +782,7 @@ def test_worker_share_class_and_series_change( payment_id = str(random.SystemRandom().getrandbits(0x58)) filing_id = (create_filing(payment_id, filing, business_id=business.id)).id - if ( - "update_existing_share_class" in test_name - or "update_with_new_share_class" in test_name - ): + if "update_existing_share_class" in test_name or "update_with_new_share_class" in test_name: updated_share_series = [ { "id": 1, @@ -888,13 +814,9 @@ def test_worker_share_class_and_series_change( "series": updated_share_series, } if "update_existing_share_class" in test_name: - filing["filing"]["correction"]["shareStructure"]["shareClasses"][ - 0 - ] = updated_share_class + filing["filing"]["correction"]["shareStructure"]["shareClasses"][0] = updated_share_class else: - filing["filing"]["correction"]["shareStructure"]["shareClasses"] = [ - updated_share_class - ] + filing["filing"]["correction"]["shareStructure"]["shareClasses"] = [updated_share_class] payment_id = str(random.SystemRandom().getrandbits(0x58)) filing_id = (create_filing(payment_id, filing, business_id=business.id)).id @@ -927,26 +849,15 @@ def test_worker_share_class_and_series_change( if "update_existing_share_class" in test_name: assert len(business.share_classes.all()) == 2 assert business.share_classes.all()[0].name == updated_share_class["name"] - assert ( - business.share_classes.all()[0].special_rights_flag - == updated_share_class["hasRightsOrRestrictions"] - ) + assert 
business.share_classes.all()[0].special_rights_flag == updated_share_class["hasRightsOrRestrictions"] assert business.share_classes.all()[1].name == share_class_json2["name"] - assert [ - item.json for item in business.share_classes.all()[1].series - ] == share_class_json2["series"] + assert [item.json for item in business.share_classes.all()[1].series] == share_class_json2["series"] if "update_with_new_share_class" in test_name: assert len(business.share_classes.all()) == 1 assert business.share_classes.all()[0].name == updated_share_class["name"] - assert ( - business.share_classes.all()[0].par_value_flag - == updated_share_class["hasParValue"] - ) - assert ( - business.share_classes.all()[0].special_rights_flag - == updated_share_class["hasRightsOrRestrictions"] - ) + assert business.share_classes.all()[0].par_value_flag == updated_share_class["hasParValue"] + assert business.share_classes.all()[0].special_rights_flag == updated_share_class["hasRightsOrRestrictions"] share_series = [item.json for item in business.share_classes.all()[0].series] for key in share_series[0].keys(): if key != "id": @@ -957,14 +868,7 @@ def test_worker_share_class_and_series_change( assert len(business.share_classes.all()) == 1 assert business.share_classes.all()[0].name == share_class_json2["name"] assert business.share_classes.all()[0].priority == share_class_json2["priority"] - assert ( - business.share_classes.all()[0].max_shares - == share_class_json2["maxNumberOfShares"] - ) - assert ( - business.share_classes.all()[0].par_value == share_class_json2["parValue"] - ) + assert business.share_classes.all()[0].max_shares == share_class_json2["maxNumberOfShares"] + assert business.share_classes.all()[0].par_value == share_class_json2["parValue"] assert business.share_classes.all()[0].currency == share_class_json2["currency"] - assert [ - item.json for item in business.share_classes.all()[0].series - ] == share_class_json2["series"] + assert [item.json for item in business.share_classes.all()[0].series] == share_class_json2["series"] diff --git a/queue_services/entity-filer/tests/unit/worker/test_correction_special_resolution.py b/queue_services/entity-filer/tests/unit/worker/test_correction_special_resolution.py index 72ec1b699c..ce37ec21db 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_correction_special_resolution.py +++ b/queue_services/entity-filer/tests/unit/worker/test_correction_special_resolution.py @@ -18,9 +18,9 @@ import random import pytest -from dateutil.parser import parse -from business_model import LegalEntity, Document, Filing, EntityRole, DocumentType +from business_model import Document, DocumentType, EntityRole, Filing, LegalEntity from business_model.utils import datetime +from dateutil.parser import parse # from legal_api.services.minio import MinioService from registry_schemas.example_data import ( @@ -28,6 +28,7 @@ CP_SPECIAL_RESOLUTION_TEMPLATE, FILING_HEADER, ) + from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors import correction from entity_filer.resources.worker import process_filing @@ -99,7 +100,7 @@ def test_special_resolution_correction( sr_filing_msg = {"filing": {"id": sr_filing_id}} # Call the process_filing method for the original special resolution process_filing(sr_filing_msg) - + if correct_filing_type == "changeOfAddress": correction_data = copy.deepcopy(correction_template) correction_data["filing"]["changeOfAddress"]["offices"] = {} @@ -123,9 +124,7 @@ def test_special_resolution_correction( 
correction_data["filing"]["correction"] = {} correction_data["filing"]["correction"]["correctedFilingId"] = sr_filing_id correction_payment_id = str(random.SystemRandom().getrandbits(0x58)) - correction_filing_id = ( - create_filing(correction_payment_id, correction_data, business_id=business_id) - ).id + correction_filing_id = (create_filing(correction_payment_id, correction_data, business_id=business_id)).id # Mock the correction filing message correction_filing_msg = {"filing": {"id": correction_filing_id}} @@ -145,9 +144,7 @@ def test_special_resolution_correction( resolution = business.resolutions.first() assert business.association_type == "HC" assert resolution is not None, "Resolution should exist" - assert ( - resolution.resolution == "

<p>xxxx</p>" - ), "Resolution text should be corrected" + assert resolution.resolution == "<p>xxxx</p>
", "Resolution text should be corrected" # # # Check if the signatory was updated party = resolution.party @@ -166,9 +163,7 @@ def test_special_resolution_correction( resolution_date = "2023-06-16" signing_date = "2023-06-17" correction_data_2 = copy.deepcopy(FILING_HEADER) - correction_data_2["filing"]["correction"] = copy.deepcopy( - CORRECTION_CP_SPECIAL_RESOLUTION - ) + correction_data_2["filing"]["correction"] = copy.deepcopy(CORRECTION_CP_SPECIAL_RESOLUTION) correction_data_2["filing"]["header"]["name"] = "correction" correction_data_2["filing"]["business"] = {"identifier": identifier} correction_data_2["filing"]["correction"]["correctedFilingType"] = "correction" @@ -183,15 +178,9 @@ def test_special_resolution_correction( # Update correction data to point to the original special resolution filing if "correction" not in correction_data_2["filing"]: correction_data_2["filing"]["correction"] = {} - correction_data_2["filing"]["correction"][ - "correctedFilingId" - ] = correction_filing_id + correction_data_2["filing"]["correction"]["correctedFilingId"] = correction_filing_id correction_payment_id_2 = str(random.SystemRandom().getrandbits(0x58)) - correction_filing_id_2 = ( - create_filing( - correction_payment_id_2, correction_data_2, business_id=business_id - ) - ).id + correction_filing_id_2 = (create_filing(correction_payment_id_2, correction_data_2, business_id=business_id)).id # Mock the correction filing message correction_filing_msg_2 = {"filing": {"id": correction_filing_id_2}} @@ -201,9 +190,7 @@ def test_special_resolution_correction( business = LegalEntity.find_by_internal_id(business_id) resolution = business.resolutions.first() assert resolution is not None, "Resolution should exist" - assert ( - resolution.resolution == "

<p>yyyy</p>" - ), "Resolution text should be corrected" + assert resolution.resolution == "<p>yyyy</p>
", "Resolution text should be corrected" assert resolution.resolution_date == parse(resolution_date).date() assert resolution.signing_date == parse(signing_date).date() @@ -235,12 +222,8 @@ def test_correction_coop_rules(app, session): correction_filing = copy.deepcopy(FILING_HEADER) correction_filing["filing"]["header"]["name"] = "correction" - correction_filing["filing"]["business"][ - "legalType" - ] = LegalEntity.EntityTypes.COOP.value - correction_filing["filing"]["correction"] = copy.deepcopy( - CORRECTION_CP_SPECIAL_RESOLUTION - ) + correction_filing["filing"]["business"]["legalType"] = LegalEntity.EntityTypes.COOP.value + correction_filing["filing"]["correction"] = copy.deepcopy(CORRECTION_CP_SPECIAL_RESOLUTION) correction_filing["filing"]["correction"]["correctedFilingId"] = sr_filing_id # rules_file_key_uploaded_by_user = upload_file('rules.pdf') # correction_filing['filing']['correction']['rulesFileKey'] = rules_file_key_uploaded_by_user @@ -248,9 +231,7 @@ def test_correction_coop_rules(app, session): payment_id = str(random.SystemRandom().getrandbits(0x58)) - filing_submission = create_filing( - payment_id, correction_filing, business_id=business.id - ) + filing_submission = create_filing(payment_id, correction_filing, business_id=business.id) filing_meta = FilingMeta() diff --git a/queue_services/entity-filer/tests/unit/worker/test_incorporation.py b/queue_services/entity-filer/tests/unit/worker/test_incorporation.py index 380f2558de..82b34d75ba 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_incorporation.py +++ b/queue_services/entity-filer/tests/unit/worker/test_incorporation.py @@ -19,21 +19,20 @@ import secrets import string from http import HTTPStatus -from unittest.mock import patch, call +from unittest.mock import call, patch import pytest -from flask import current_app # # from entity_queue_common.messages import get_data_from_msg # from entity_queue_common.service_utils import subscribe_to_queue -from business_model import LegalEntity, Filing, EntityRole, RegistrationBootstrap +from business_model import EntityRole, Filing, LegalEntity +from flask import current_app # from legal_api.services import RegistrationBootstrapService # from legal_api.services.bootstrap import AccountService from registry_schemas.example_data import INCORPORATION_FILING_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing # from tests.pytest_marks import colin_api_integration, integration_affiliation, integration_namex_api from tests.unit import create_filing @@ -46,9 +45,7 @@ def bootstrap(account): bootstrap = RegistrationBootstrap() allowed_encoded = string.ascii_letters + string.digits - bootstrap.identifier = "T" + "".join( - secrets.choice(allowed_encoded) for _ in range(9) - ) + bootstrap.identifier = "T" + "".join(secrets.choice(allowed_encoded) for _ in range(9)) bootstrap.save() yield bootstrap.identifier @@ -60,9 +57,7 @@ def bootstrap(account): def test_incorporation_filing(app, session, bootstrap, requests_mock): """Assert we can retrieve a new corp number from COLIN and incorporate a business.""" filing = copy.deepcopy(INCORPORATION_FILING_TEMPLATE) - filing["filing"]["incorporationApplication"]["nameRequest"][ - "nrNumber" - ] = "NR 0000021" + filing["filing"]["incorporationApplication"]["nameRequest"]["nrNumber"] = "NR 0000021" payment_id = str(random.SystemRandom().getrandbits(0x58)) del 
filing["filing"]["incorporationApplication"]["parties"][0]["officer"]["id"] del filing["filing"]["incorporationApplication"]["parties"][1]["officer"]["id"] @@ -75,9 +70,7 @@ def test_incorporation_filing(app, session, bootstrap, requests_mock): response = "1234567" with app.app_context(): current_app.config["COLIN_API"] = "http://localhost" - requests_mock.post( - f'{current_app.config["COLIN_API"]}/BC', json={"corpNum": response} - ) + requests_mock.post(f'{current_app.config["COLIN_API"]}/BC', json={"corpNum": response}) process_filing(filing_msg) # Check outcome @@ -89,28 +82,16 @@ def test_incorporation_filing(app, session, bootstrap, requests_mock): assert filing assert filing.status == Filing.Status.COMPLETED.value assert business.identifier == filing_json["filing"]["business"]["identifier"] - assert ( - business.founding_date.isoformat() - == filing_json["filing"]["business"]["foundingDate"] - ) + assert business.founding_date.isoformat() == filing_json["filing"]["business"]["foundingDate"] assert len(business.share_classes.all()) == len( - filing_json["filing"]["incorporationApplication"]["shareStructure"][ - "shareClasses" - ] - ) - assert len(business.offices.all()) == len( - filing_json["filing"]["incorporationApplication"]["offices"] + filing_json["filing"]["incorporationApplication"]["shareStructure"]["shareClasses"] ) + assert len(business.offices.all()) == len(filing_json["filing"]["incorporationApplication"]["offices"]) assert len(EntityRole.get_parties_by_role(business.id, "director")) == 1 assert len(EntityRole.get_parties_by_role(business.id, "incorporator")) == 1 - assert ( - len(EntityRole.get_entity_roles_by_filing(filing.id, role="completing_party")) - == 1 - ) + assert len(EntityRole.get_entity_roles_by_filing(filing.id, role="completing_party")) == 1 incorporator = (EntityRole.get_parties_by_role(business.id, "incorporator"))[0] - completing_party = ( - EntityRole.get_entity_roles_by_filing(filing.id, role="completing_party") - )[0] + completing_party = (EntityRole.get_entity_roles_by_filing(filing.id, role="completing_party"))[0] assert incorporator.appointment_date assert completing_party.appointment_date diff --git a/queue_services/entity-filer/tests/unit/worker/test_resource.py b/queue_services/entity-filer/tests/unit/worker/test_resource.py index 8371343f7e..e338c25ce2 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_resource.py +++ b/queue_services/entity-filer/tests/unit/worker/test_resource.py @@ -4,8 +4,8 @@ from http import HTTPStatus import pytest +from registry_schemas.example_data import FILING_HEADER, SPECIAL_RESOLUTION from simple_cloudevent import SimpleCloudEvent, to_queue_message -from registry_schemas.example_data import SPECIAL_RESOLUTION, FILING_HEADER from tests.unit import create_business, create_filing @@ -85,9 +85,7 @@ def test_process_simple_filing(client, session): business = create_business(identifier, legal_type=legal_type) payment_id = str(random.SystemRandom().getrandbits(0x58)) - filing_id = ( - create_filing(payment_id, filing_submission, business_id=business.id) - ).id + filing_id = (create_filing(payment_id, filing_submission, business_id=business.id)).id ce = SimpleCloudEvent( id="fake-id", diff --git a/queue_services/entity-filer/tests/unit/worker/test_restoration.py b/queue_services/entity-filer/tests/unit/worker/test_restoration.py index 073acf17cb..308f344cc2 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_restoration.py +++ b/queue_services/entity-filer/tests/unit/worker/test_restoration.py @@ 
-16,17 +16,14 @@ import random import pytest -from business_model import LegalEntity, Filing, EntityRole, OfficeType, Address +from business_model import Address, EntityRole, Filing, LegalEntity, OfficeType from business_model.utils.datetime import datetime from business_model.utils.legislation_datetime import LegislationDatetime from registry_schemas.example_data import FILING_HEADER, RESTORATION - -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage -from tests.unit import create_business, create_filing - from sql_versioning import versioned_session -from tests.unit import nested_session + +from entity_filer.resources.worker import FilingMessage, process_filing +from tests.unit import create_business, create_filing, nested_session legal_name = "old name" legal_type = "BC" @@ -73,9 +70,7 @@ def test_restoration_business_update(app, session, mocker, restoration_type): assert business.dissolution_date is None if restoration_type in ("limitedRestoration", "limitedRestorationExtension"): - assert business.restoration_expiry_date == datetime.fromisoformat( - f"{expiry_date}T07:00:00+00:00" - ) + assert business.restoration_expiry_date == datetime.fromisoformat(f"{expiry_date}T07:00:00+00:00") final_filing = Filing.find_by_id(filing_id) restoration = final_filing.meta_data.get("restoration", {}) @@ -126,9 +121,7 @@ def test_restoration_legal_name(app, session, mocker, test_name): assert restoration.get("toLegalName") == new_legal_name assert restoration.get("fromLegalName") == legal_name else: - numbered_legal_name_suffix = LegalEntity.BUSINESSES[legal_type][ - "numberedBusinessNameSuffix" - ] + numbered_legal_name_suffix = LegalEntity.BUSINESSES[legal_type]["numberedBusinessNameSuffix"] new_legal_name = f"{identifier[2:]} {numbered_legal_name_suffix}" assert business.legal_name == new_legal_name assert restoration.get("toLegalName") == new_legal_name @@ -174,17 +167,13 @@ def test_restoration_office_addresses(app, session, mocker): for key in ["streetAddress", "postalCode", "addressCity", "addressRegion"]: assert ( changed_delivery_address.json[key] - == filing["filing"]["restoration"]["offices"]["registeredOffice"][ - "deliveryAddress" - ][key] + == filing["filing"]["restoration"]["offices"]["registeredOffice"]["deliveryAddress"][key] ) # changed_mailing_address = business.entity_mailing_address.one_or_none() for key in ["streetAddress", "postalCode", "addressCity", "addressRegion"]: assert ( changed_mailing_address.json[key] - == filing["filing"]["restoration"]["offices"]["registeredOffice"][ - "mailingAddress" - ][key] + == filing["filing"]["restoration"]["offices"]["registeredOffice"]["mailingAddress"][key] ) @@ -216,10 +205,7 @@ def test_restoration_court_order(app, session, mocker, approval_type): final_filing = Filing.find_by_id(filing_id) assert filing["filing"]["restoration"]["approvalType"] == final_filing.approval_type if approval_type == "courtOrder": - assert ( - filing["filing"]["restoration"]["courtOrder"]["fileNumber"] - == final_filing.court_order_file_number - ) + assert filing["filing"]["restoration"]["courtOrder"]["fileNumber"] == final_filing.court_order_file_number else: assert final_filing.court_order_file_number is None @@ -258,18 +244,10 @@ def test_restoration_registrar(app, session, mocker, approval_type): final_filing = Filing.find_by_id(filing_id) assert filing["filing"]["restoration"]["approvalType"] == final_filing.approval_type if approval_type == "registrar": - assert final_filing.application_date == 
datetime.fromisoformat( - f"{application_date}T08:00:00+00:00" - ) - assert final_filing.notice_date == datetime.fromisoformat( - f"{notice_date}T07:00:00+00:00" - ) - assert application_date == LegislationDatetime.format_as_legislation_date( - final_filing.application_date - ) - assert notice_date == LegislationDatetime.format_as_legislation_date( - final_filing.notice_date - ) + assert final_filing.application_date == datetime.fromisoformat(f"{application_date}T08:00:00+00:00") + assert final_filing.notice_date == datetime.fromisoformat(f"{notice_date}T07:00:00+00:00") + assert application_date == LegislationDatetime.format_as_legislation_date(final_filing.application_date) + assert notice_date == LegislationDatetime.format_as_legislation_date(final_filing.notice_date) else: assert final_filing.application_date is None assert final_filing.notice_date is None @@ -296,9 +274,7 @@ def test_restoration_name_translations(app, session, mocker): process_filing(filing_msg) # Check outcome - assert filing["filing"]["restoration"]["nameTranslations"] == [ - {"name": "ABCD Ltd."} - ] + assert filing["filing"]["restoration"]["nameTranslations"] == [{"name": "ABCD Ltd."}] assert business.aliases is not None @@ -308,9 +284,7 @@ def test_update_party(app, session, mocker): versioned_session(session) with nested_session(session): identifier = "BC1234567" - business = create_business( - identifier, legal_type=legal_type, legal_name=legal_name - ) + business = create_business(identifier, legal_type=legal_type, legal_name=legal_name) business.save() business_id = business.id filing = copy.deepcopy(FILING_HEADER) @@ -350,9 +324,7 @@ def test_update_party(app, session, mocker): # Check outcome # TODO by business not filing - historical_roles = EntityRole.get_entity_roles_history_for_entity( - entity_id=business_id - ) + historical_roles = EntityRole.get_entity_roles_history_for_entity(entity_id=business_id) number_of_historical_roles = len(historical_roles) assert number_of_historical_roles == 2 assert historical_roles[1].cessation_date @@ -364,21 +336,15 @@ def test_update_party(app, session, mocker): assert party_role.role_type == EntityRole.RoleTypes.applicant.value assert ( party_role.related_entity.first_name - == filing["filing"]["restoration"]["parties"][0]["officer"][ - "firstName" - ].upper() + == filing["filing"]["restoration"]["parties"][0]["officer"]["firstName"].upper() ) assert ( party_role.delivery_address.street - == filing["filing"]["restoration"]["parties"][0]["deliveryAddress"][ - "streetAddress" - ] + == filing["filing"]["restoration"]["parties"][0]["deliveryAddress"]["streetAddress"] ) assert ( party_role.mailing_address.street - == filing["filing"]["restoration"]["parties"][0]["mailingAddress"][ - "streetAddress" - ] + == filing["filing"]["restoration"]["parties"][0]["mailingAddress"]["streetAddress"] ) diff --git a/queue_services/entity-filer/tests/unit/worker/test_special_resolution.py b/queue_services/entity-filer/tests/unit/worker/test_special_resolution.py index 1e5767c914..947286a3c2 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_special_resolution.py +++ b/queue_services/entity-filer/tests/unit/worker/test_special_resolution.py @@ -18,11 +18,10 @@ import random import pytest -from business_model import LegalEntity, Filing +from business_model import Filing, LegalEntity from registry_schemas.example_data import CP_SPECIAL_RESOLUTION_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from 
entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_entity, create_filing diff --git a/queue_services/entity-filer/tests/unit/worker/test_technical_correction.py b/queue_services/entity-filer/tests/unit/worker/test_technical_correction.py index d6cf2928f2..7f3d50766f 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_technical_correction.py +++ b/queue_services/entity-filer/tests/unit/worker/test_technical_correction.py @@ -16,15 +16,10 @@ import json import random -from business_model import LegalEntity, Filing, PartyRole -from registry_schemas.example_data import ( - ANNUAL_REPORT, - FILING_HEADER, - SPECIAL_RESOLUTION, -) +from business_model import Filing, LegalEntity, PartyRole +from registry_schemas.example_data import ANNUAL_REPORT, FILING_HEADER, SPECIAL_RESOLUTION -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -64,9 +59,7 @@ def test_technical_correction_ar(app, session): # subvert the filing technical_correction_filing = copy.deepcopy(FILING_HEADER) - technical_correction_filing["filing"]["specialResolution"] = copy.deepcopy( - SPECIAL_RESOLUTION - ) + technical_correction_filing["filing"]["specialResolution"] = copy.deepcopy(SPECIAL_RESOLUTION) filing.tech_correction_json = technical_correction_filing # over ride the state and skip state setting listeners for this test filing.skip_status_listener = True diff --git a/queue_services/entity-filer/tests/unit/worker/test_transition.py b/queue_services/entity-filer/tests/unit/worker/test_transition.py index 3367357a00..0054d6842f 100644 --- a/queue_services/entity-filer/tests/unit/worker/test_transition.py +++ b/queue_services/entity-filer/tests/unit/worker/test_transition.py @@ -15,15 +15,10 @@ import copy import random -from business_model import LegalEntity, Filing, EntityRole -from registry_schemas.example_data import ( - TRANSITION_FILING_TEMPLATE, - FILING_HEADER, - TRANSITION, -) +from business_model import EntityRole, Filing, LegalEntity +from registry_schemas.example_data import FILING_HEADER, TRANSITION, TRANSITION_FILING_TEMPLATE -from entity_filer.resources.worker import process_filing -from entity_filer.resources.worker import FilingMessage +from entity_filer.resources.worker import FilingMessage, process_filing from tests.unit import create_business, create_filing @@ -62,12 +57,8 @@ def test_transition_filing(app, session): assert len(business.share_classes.all()) == len( filing_json["filing"]["transition"]["shareStructure"]["shareClasses"] ) - assert len(business.offices.all()) == len( - filing_json["filing"]["transition"]["offices"] - ) - assert len(business.aliases.all()) == len( - filing_json["filing"]["transition"]["nameTranslations"] - ) + assert len(business.offices.all()) == len(filing_json["filing"]["transition"]["offices"]) + assert len(business.aliases.all()) == len(filing_json["filing"]["transition"]["nameTranslations"]) assert len(business.resolutions.all()) == len( filing_json["filing"]["transition"]["shareStructure"]["resolutionDates"] ) diff --git a/queue_services/entity-pay/__init__.py b/queue_services/entity-pay/__init__.py new file mode 100644 index 0000000000..c09781299b --- /dev/null +++ b/queue_services/entity-pay/__init__.py @@ -0,0 +1,15 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Init.""" +# pylint: disable=invalid-name diff --git a/queue_services/entity-pay/poetry.lock b/queue_services/entity-pay/poetry.lock index f7b68bfe92..7790cbf1b7 100644 --- a/queue_services/entity-pay/poetry.lock +++ b/queue_services/entity-pay/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alembic" @@ -106,6 +106,17 @@ files = [ {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + [[package]] name = "asyncio-nats-client" version = "0.11.5" @@ -168,6 +179,29 @@ setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "bandit" +version = "1.7.7" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -188,29 +222,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = 
"black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -222,7 +260,7 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -458,6 +496,70 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", 
hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "datedelta" version = "1.4" @@ -469,6 +571,20 @@ files = [ {file = "datedelta-1.4.tar.gz", hash = "sha256:3f1ef319ead642a76a3cab731917bf14a0ced0d91943f33ff57ae615837cab97"}, ] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + [[package]] name = "dpath" version = "2.1.6" @@ -512,6 +628,38 @@ files = [ [package.extras] tests = ["coverage", "coveralls", "dill", "mock", "nose"] +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "flask" version = "2.3.3" @@ -966,6 +1114,20 @@ files = [ [package.dependencies] arrow = ">=0.15.0" +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -1244,6 +1406,30 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -1313,6 +1499,28 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "minio" version = "7.2.0" @@ -1363,6 +1571,17 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + [[package]] name = "pg8000" version = "1.30.3" @@ -1523,6 +1742,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = 
"psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -1557,6 +1778,17 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + [[package]] name = "pycountry" version = "22.3.5" @@ -1674,6 +1906,59 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pypdf2" version = "3.0.1" @@ -1726,6 +2011,24 @@ pluggy = ">=0.12,<2.0" [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "pytest-mock" version = "3.12.0" @@ -1803,6 +2106,66 @@ files = [ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "referencing" version = "0.30.2" @@ -1923,6 +2286,24 @@ files = [ {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, ] +[[package]] +name = "rich" +version = "13.7.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rpds-py" version = "0.12.0" @@ -2324,6 +2705,20 @@ test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3 timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] +[[package]] +name = "stevedore" +version = "5.1.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, + {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + [[package]] name = "strict-rfc3339" version = "0.7" @@ -2334,6 +2729,17 @@ files = [ {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, ] +[[package]] +name = "tomlkit" +version = "0.12.3" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, +] + 
[[package]] name = "types-python-dateutil" version = "2.8.19.14" @@ -2422,4 +2828,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "3f4cca09051fb09edb4cdf71ba09f936199924ad1fcbaf836a93deec616d5792" +content-hash = "5191ad3b9436cfc77576e4e4332bc65f3f700c6a634383f8100f158659f77913" diff --git a/queue_services/entity-pay/pyproject.toml b/queue_services/entity-pay/pyproject.toml index 02d5ca92e4..a227b5767c 100644 --- a/queue_services/entity-pay/pyproject.toml +++ b/queue_services/entity-pay/pyproject.toml @@ -21,10 +21,133 @@ google-cloud-pubsub = "^2.17.1" pg8000 = "^1.29.8" [tool.poetry.group.dev.dependencies] -black = "^23.3.0" pytest = "^7.4.0" pytest-mock = "^3.11.1" +pytest-cov = "^4.0.0" requests-mock = "^1.11.0" +black = "^23.12.1" +pylint = "^3.0.3" +bandit = "^1.7.6" +flake8-pyproject = "^1.2.3" +isort = "^5.13.2" + +[tool.bandit] +exclude_dirs = [".venv","tests"] +skips = ["B104"] + +[tool.flake8] +ignore = ["F401","E402", "Q000", "E203", "W503"] +exclude = [ + ".venv", + ".git", + ".history", + "devops", + "*migrations*", +] +per-file-ignores = [ + "__init__.py:F401", + "*.py:B902" +] +max-line-length = 120 +docstring-min-length=10 +count = true + +[tool.black] +target-version = ["py310", "py311", "py312"] +line-length = 120 +include = '\.pyi?$' +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. + migrations + | devops +)/ +''' + +[tool.isort] +atomic = true +profile = "black" +line_length = 120 +skip_gitignore = true +skip_glob = ["migrations", "devops"] + +[tool.pylint.main] +fail-under = 10 +max-line-length = 120 +ignore = [ "migrations", "devops", "tests"] +ignore-patterns = ["^\\.#"] +ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] +ignored-classes= "scoped_session" +ignore-long-lines = "^\\s*(# )??$" +extension-pkg-whitelist = "pydantic" +notes = ["FIXME","XXX","TODO"] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = "C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +function-naming-style = "snake_case" +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "any" +variable-naming-style = "snake_case" +docstring-min-length = -1 +good-names = ["i", "j", "k", "ex", "Run", "_"] +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pytest.ini_options] +minversion = "2.0" +testpaths = [ + "tests", +] +addopts = "--verbose --strict -p no:warnings --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml" +python_files = [ + "test*.py" +] +norecursedirs = [ + ".git", ".tox", "venv*", "requirements*", "build", +] +log_cli = true +log_cli_level = "1" +filterwarnings = [ + "ignore::UserWarning" +] +markers = [ + "slow", + "serial", +] + +[tool.coverage.run] +branch = true +source = [ + "src/entity-pay", +] +omit = [ + "wsgi.py", + 
"gunicorn_config.py" +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "from", + "import", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + 'if __name__ == "__main__":', +] [build-system] requires = ["poetry-core"] diff --git a/queue_services/entity-pay/src/entity_pay/__init__.py b/queue_services/entity-pay/src/entity_pay/__init__.py index 07e9bf5bc1..37dbf095a6 100644 --- a/queue_services/entity-pay/src/entity_pay/__init__.py +++ b/queue_services/entity-pay/src/entity_pay/__init__.py @@ -44,8 +44,7 @@ from legal_api.utils.run_version import get_run_version from sentry_sdk.integrations.flask import FlaskIntegration -from .config import Config -from .config import Production +from .config import Config, Production from .resources import register_endpoints from .services import queue diff --git a/queue_services/entity-pay/src/entity_pay/config.py b/queue_services/entity-pay/src/entity_pay/config.py index 4c229ef85e..571c7736bc 100644 --- a/queue_services/entity-pay/src/entity_pay/config.py +++ b/queue_services/entity-pay/src/entity_pay/config.py @@ -43,7 +43,6 @@ from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) @@ -70,11 +69,11 @@ class Config: # pylint: disable=too-few-public-methods # POSTGRESQL if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): - SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" - else: SQLALCHEMY_DATABASE_URI = ( - f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" # API Endpoints PAY_API_URL = os.getenv("PAY_API_URL", "") @@ -86,12 +85,8 @@ class Config: # pylint: disable=too-few-public-methods GCP_AUTH_KEY = os.getenv("GCP_AUTH_KEY", None) ENTITY_MAILER_TOPIC = os.getenv("ENTITY_MAILER_TOPIC", "mailer") ENTITY_FILER_TOPIC = os.getenv("ENTITY_FILER_TOPIC", "filer") - AUDIENCE = os.getenv( - "AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber" - ) - PUBLISHER_AUDIENCE = os.getenv( - "PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher" - ) + AUDIENCE = os.getenv("AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber") + PUBLISHER_AUDIENCE = os.getenv("PUBLISHER_AUDIENCE", "https://pubsub.googleapis.com/google.pubsub.v1.Publisher") class Development(Config): # pylint: disable=too-few-public-methods diff --git a/queue_services/entity-pay/src/entity_pay/resources/__init__.py b/queue_services/entity-pay/src/entity_pay/resources/__init__.py index b426459b91..6669974e8a 100644 --- a/queue_services/entity-pay/src/entity_pay/resources/__init__.py +++ b/queue_services/entity-pay/src/entity_pay/resources/__init__.py @@ -42,6 +42,7 @@ def register_endpoints(app: Flask): + """Register endpoints with the flask application""" # Allow base route to match with, and without a trailing slash app.url_map.strict_slashes = False diff --git a/queue_services/entity-pay/src/entity_pay/resources/worker.py b/queue_services/entity-pay/src/entity_pay/resources/worker.py index ae73f76e85..35277e5ea4 100644 --- a/queue_services/entity-pay/src/entity_pay/resources/worker.py +++ 
b/queue_services/entity-pay/src/entity_pay/resources/worker.py @@ -41,14 +41,9 @@ from http import HTTPStatus from typing import Optional -from flask import Blueprint -from flask import current_app -from flask import jsonify -from flask import request +from flask import Blueprint, current_app, request from legal_api.models import Filing from simple_cloudevent import SimpleCloudEvent -from werkzeug.exceptions import UnsupportedMediaType -from werkzeug.exceptions import BadRequest from entity_pay.services import queue from entity_pay.services.logging import structured_log @@ -89,10 +84,7 @@ def worker(): # 2. Get payment information # ## - if ( - not (payment_token := get_payment_token(ce)) - or payment_token.status_code != "COMPLETED" - ): + if not (payment_token := get_payment_token(ce)) or payment_token.status_code != "COMPLETED": # no payment info, or not a payment COMPLETED token, take off Q return {}, HTTPStatus.OK @@ -109,9 +101,7 @@ def worker(): structured_log(request, "INFO", f"processing payment: {payment_token.id}") # setting the payment_completion_date, marks the filing as paid - filing.payment_completion_date = datetime.datetime.utcnow().replace( - tzinfo=datetime.timezone.utc - ) + filing.payment_completion_date = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) filing.save() # None of these should bail as the filing has been marked PAID @@ -130,24 +120,17 @@ def worker(): # ## with suppress(Exception): mail_topic = current_app.config.get("ENTITY_MAILER_TOPIC", "mailer") - ret = queue.publish( - topic=mail_topic, payload=queue.to_queue_message(cloud_event) - ) - structured_log( - request, "INFO", f"publish to emailer for pay-id: {payment_token.id}" - ) + # pylint: disable-next=unused-variable + ret = queue.publish(topic=mail_topic, payload=queue.to_queue_message(cloud_event)) + structured_log(request, "INFO", f"publish to emailer for pay-id: {payment_token.id}") # 5. 
Publish to filer Q, if the filing is not a FED (Effective date > now()) # ## with suppress(Exception): if filing.effective_date <= filing.payment_completion_date: filer_topic = current_app.config.get("ENTITY_FILER_TOPIC", "filer") - ret = queue.publish( - topic=filer_topic, payload=queue.to_queue_message(cloud_event) - ) - structured_log( - request, "INFO", f"publish to filer for pay-id: {payment_token.id}" - ) + ret = queue.publish(topic=filer_topic, payload=queue.to_queue_message(cloud_event)) # noqa: F841 + structured_log(request, "INFO", f"publish to filer for pay-id: {payment_token.id}") structured_log(request, "INFO", f"completed ce: {str(ce)}") return {}, HTTPStatus.OK @@ -155,6 +138,8 @@ def worker(): @dataclass class PaymentToken: + """Payment Token class""" + id: Optional[str] = None status_code: Optional[str] = None filing_identifier: Optional[str] = None diff --git a/queue_services/entity-pay/src/entity_pay/services/__init__.py b/queue_services/entity-pay/src/entity_pay/services/__init__.py index 19d346fa93..aeb71bc835 100644 --- a/queue_services/entity-pay/src/entity_pay/services/__init__.py +++ b/queue_services/entity-pay/src/entity_pay/services/__init__.py @@ -34,5 +34,4 @@ """This module contains all the services used.""" from .gcp_queue import GcpQueue - queue = GcpQueue() diff --git a/queue_services/entity-pay/src/entity_pay/services/gcp_queue.py b/queue_services/entity-pay/src/entity_pay/services/gcp_queue.py index e29cf53c54..db458396fc 100644 --- a/queue_services/entity-pay/src/entity_pay/services/gcp_queue.py +++ b/queue_services/entity-pay/src/entity_pay/services/gcp_queue.py @@ -36,24 +36,29 @@ import base64 import json -from concurrent.futures import CancelledError from concurrent.futures import TimeoutError # pylint: disable=W0622 +from concurrent.futures import CancelledError from contextlib import suppress from typing import Optional from flask import Flask, current_app -from werkzeug.local import LocalProxy from google.auth import jwt from google.cloud import pubsub_v1 -from simple_cloudevent import CloudEventVersionException -from simple_cloudevent import InvalidCloudEventError -from simple_cloudevent import SimpleCloudEvent -from simple_cloudevent import from_queue_message -from simple_cloudevent import to_queue_message +from simple_cloudevent import ( + CloudEventVersionException, + InvalidCloudEventError, + SimpleCloudEvent, + from_queue_message, + to_queue_message, +) +from werkzeug.local import LocalProxy class GcpQueue: + """Provides Queue type services""" + def __init__(self, app: Flask = None): + """Initializes the GCP Queue class""" self.audience = None self.credentials_pub = None self.gcp_auth_key = None @@ -62,9 +67,11 @@ def __init__(self, app: Flask = None): self._publisher = None if app: - self.app_init(app) + self.init_app(app) def init_app(self, app: Flask): + """Initializes the application""" + self.gcp_auth_key = app.config.get("GCP_AUTH_KEY") if self.gcp_auth_key: try: @@ -77,32 +84,26 @@ def init_app(self, app: Flask): "https://pubsub.googleapis.com/google.pubsub.v1.Publisher", ) - self.service_account_info = json.loads( - base64.b64decode(self.gcp_auth_key).decode("utf-8") - ) - credentials = jwt.Credentials.from_service_account_info( - self.service_account_info, audience=audience - ) - self.credentials_pub = credentials.with_claims( - audience=publisher_audience - ) + self.service_account_info = json.loads(base64.b64decode(self.gcp_auth_key).decode("utf-8")) + credentials = 
jwt.Credentials.from_service_account_info(self.service_account_info, audience=audience) + self.credentials_pub = credentials.with_claims(audience=publisher_audience) except Exception as error: # noqa: B902 - raise Exception( - "Unable to create a connection", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to create a connection", error) from error # pylint: disable=W0719 @property def publisher(self): + """Returns the publisher""" + if not self._publisher and self.credentials_pub: - self._publisher = pubsub_v1.PublisherClient( - credentials=self.credentials_pub - ) + self._publisher = pubsub_v1.PublisherClient(credentials=self.credentials_pub) else: self._publisher = pubsub_v1.PublisherClient() return self.credentials_pub @staticmethod def is_valid_envelope(msg: dict): + """Checks if the envelope is valid""" + if ( msg.get("subscription") and (message := msg.get("message")) @@ -114,17 +115,15 @@ def is_valid_envelope(msg: dict): @staticmethod def get_envelope(request: LocalProxy) -> Optional[dict]: + """Returns the envelope""" + with suppress(Exception): - if (envelope := request.get_json()) and GcpQueue.is_valid_envelope( - envelope - ): + if (envelope := request.get_json()) and GcpQueue.is_valid_envelope(envelope): return envelope return None @staticmethod - def get_simple_cloud_event( - request: LocalProxy, return_raw: bool = False - ) -> type[SimpleCloudEvent | dict | None]: + def get_simple_cloud_event(request: LocalProxy, return_raw: bool = False) -> type[SimpleCloudEvent | dict | None]: """Return a SimpleCloudEvent if one is in session from the PubSub call. Parameters @@ -176,14 +175,15 @@ def publish(self, topic: str, payload: bytes): return future.result() except (CancelledError, TimeoutError) as error: - raise Exception( - "Unable to post to queue", error - ) from error # pylint: disable=W0719 + raise Exception("Unable to post to queue", error) from error # pylint: disable=W0719 @staticmethod def to_queue_message(ce: SimpleCloudEvent): + """Return a byte string of the CloudEvent in JSON format""" + return to_queue_message(ce) @staticmethod def from_queue_message(data: dict): + """Convert a queue message back to a simple CloudEvent""" return from_queue_message(data) diff --git a/queue_services/entity-pay/src/entity_pay/services/logging.py b/queue_services/entity-pay/src/entity_pay/services/logging.py index 8fb46cbb79..a72b49e940 100644 --- a/queue_services/entity-pay/src/entity_pay/services/logging.py +++ b/queue_services/entity-pay/src/entity_pay/services/logging.py @@ -40,29 +40,28 @@ def structured_log(request: LocalProxy, severity: str = "NOTICE", message: str = None): + """Prints structured log message""" frm = inspect.stack()[1] mod = inspect.getmodule(frm[0]) # Build structured log messages as an object. global_log_fields = {} - if PROJECT := os.environ.get("GOOGLE_CLOUD_PROJECT"): + if project := os.environ.get("GOOGLE_CLOUD_PROJECT"): # Add log correlation to nest all log messages. trace_header = request.headers.get("X-Cloud-Trace-Context") - if trace_header and PROJECT: + if trace_header and project: trace = trace_header.split("/") - global_log_fields[ - "logging.googleapis.com/trace" - ] = f"projects/{PROJECT}/traces/{trace[0]}" + global_log_fields["logging.googleapis.com/trace"] = f"projects/{project}/traces/{trace[0]}" # Complete a structured log entry. - entry = dict( - severity=severity, - message=message, + entry = { + "severity": severity, + "message": message, # Log viewer accesses 'component' as jsonPayload.component'. 
- component=f"{mod.__name__}.{frm.function}", + "component": f"{mod.__name__}.{frm.function}", **global_log_fields, - ) + } print(json.dumps(entry)) diff --git a/queue_services/entity-pay/tests/__init__.py b/queue_services/entity-pay/tests/__init__.py index 3e44a42f53..213030638f 100644 --- a/queue_services/entity-pay/tests/__init__.py +++ b/queue_services/entity-pay/tests/__init__.py @@ -14,7 +14,6 @@ """The Test Suites to ensure that the service is built and operating correctly.""" import datetime - EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) diff --git a/queue_services/entity-pay/tests/conftest.py b/queue_services/entity-pay/tests/conftest.py index 0a9c74718c..0d1f1ee79d 100644 --- a/queue_services/entity-pay/tests/conftest.py +++ b/queue_services/entity-pay/tests/conftest.py @@ -22,9 +22,9 @@ import psycopg2 import pytest +import sqlalchemy from flask import Flask from flask_migrate import Migrate, upgrade -import sqlalchemy from sqlalchemy import event, text from sqlalchemy.schema import MetaData @@ -34,7 +34,6 @@ from . import FROZEN_DATETIME - DB_TEST_NAME: Final = os.getenv("DATABASE_TEST_NAME") @@ -73,9 +72,7 @@ def create_test_db( DATABASE_URI = DATABASE_URI[: DATABASE_URI.rfind("/")] + "/postgres" try: - with sqlalchemy.create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with sqlalchemy.create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(f"CREATE DATABASE {database}")) return True @@ -106,12 +103,8 @@ def drop_test_db( WHERE pg_stat_activity.datname = '{database}' AND pid <> pg_backend_pid(); """ - with contextlib.suppress( - sqlalchemy.exc.ProgrammingError, psycopg2.OperationalError, Exception - ): - with sqlalchemy.create_engine( - DATABASE_URI, isolation_level="AUTOCOMMIT" - ).connect() as conn: + with contextlib.suppress(sqlalchemy.exc.ProgrammingError, psycopg2.OperationalError, Exception): + with sqlalchemy.create_engine(DATABASE_URI, isolation_level="AUTOCOMMIT").connect() as conn: conn.execute(text(close_all)) conn.execute(text(f"DROP DATABASE {database}")) @@ -209,9 +202,7 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name @event.listens_for(sess(), "after_transaction_end") def restart_savepoint(sess2, trans): # pylint: disable=unused-variable # Detecting whether this is indeed the nested transaction of the test - if ( - trans.nested and not trans._parent.nested - ): # pylint: disable=protected-access + if trans.nested and not trans._parent.nested: # pylint: disable=protected-access # Handle where test DOESN'T session.commit(), sess2.expire_all() sess.begin_nested() diff --git a/queue_services/entity-pay/tests/unit/__init__.py b/queue_services/entity-pay/tests/unit/__init__.py index deb459e8a3..eba28cb8c2 100644 --- a/queue_services/entity-pay/tests/unit/__init__.py +++ b/queue_services/entity-pay/tests/unit/__init__.py @@ -70,7 +70,7 @@ def nested_session(session): sess = session.begin_nested() yield sess sess.rollback() - except: + except: # noqa: E722 pass finally: pass diff --git a/queue_services/entity-pay/tests/unit/services/test_gcp_queue.py b/queue_services/entity-pay/tests/unit/services/test_gcp_queue.py index e33ccd22af..7074f51681 100644 --- a/queue_services/entity-pay/tests/unit/services/test_gcp_queue.py +++ b/queue_services/entity-pay/tests/unit/services/test_gcp_queue.py @@ -2,10 +2,9 @@ from contextlib import suppress from http import HTTPStatus -import pytest import flask 
-from simple_cloudevent import SimpleCloudEvent -from simple_cloudevent import to_queue_message +import pytest +from simple_cloudevent import SimpleCloudEvent, to_queue_message from entity_pay.services.gcp_queue import GcpQueue @@ -20,9 +19,7 @@ } -@pytest.mark.parametrize( - "test_name,msg,expected", [("invalid", {}, False), ("valid", BASE_ENVELOPE, True)] -) +@pytest.mark.parametrize("test_name,msg,expected", [("invalid", {}, False), ("valid", BASE_ENVELOPE, True)]) def test_valid_envelope(test_name, msg, expected): """Test the validation the envelope.""" rv = GcpQueue.is_valid_envelope(msg) diff --git a/queue_services/entity-pay/tests/unit/test_worker.py b/queue_services/entity-pay/tests/unit/test_worker.py index 4b118aab7e..0c24670691 100644 --- a/queue_services/entity-pay/tests/unit/test_worker.py +++ b/queue_services/entity-pay/tests/unit/test_worker.py @@ -41,12 +41,8 @@ from legal_api.models import Filing from simple_cloudevent import SimpleCloudEvent, to_queue_message -from entity_pay.resources.worker import get_filing_by_payment_id from entity_pay.resources.worker import get_payment_token - -from tests.unit import create_legal_entity -from tests.unit import create_filing -from tests.unit import nested_session +from tests.unit import create_filing, create_legal_entity, nested_session def test_no_message(client): @@ -137,7 +133,8 @@ def test_get_payment_token(): def test_process_payment_failed(app, session, client, mocker): """Assert that an AR filling status is set to error if payment transaction failed.""" - from legal_api.models import LegalEntity, Filing + from legal_api.models import Filing, LegalEntity + from entity_pay.resources.worker import get_filing_by_payment_id from entity_pay.services import queue @@ -158,9 +155,7 @@ def test_process_payment_failed(app, session, client, mocker): } } - message = helper_create_cloud_event_envelope( - source="sbc-pay", subject="payment", data=payment_token - ) + message = helper_create_cloud_event_envelope(source="sbc-pay", subject="payment", data=payment_token) def mock_publish(): return {} @@ -185,6 +180,7 @@ def mock_publish(): def test_process_payment(app, session, client, mocker): """Assert that an AR filling status is set to error if payment transaction failed.""" from legal_api.models import Filing + from entity_pay.resources.worker import get_filing_by_payment_id from entity_pay.services import queue @@ -205,9 +201,7 @@ def test_process_payment(app, session, client, mocker): } } - message = helper_create_cloud_event_envelope( - source="sbc-pay", subject="payment", data=payment_token - ) + message = helper_create_cloud_event_envelope(source="sbc-pay", subject="payment", data=payment_token) # keep track of topics called on the mock topics = [] @@ -258,9 +252,7 @@ def helper_create_cloud_event_envelope( } } if not ce: - ce = SimpleCloudEvent( - id=cloud_event_id, source=source, subject=subject, type=type, data=data - ) + ce = SimpleCloudEvent(id=cloud_event_id, source=source, subject=subject, type=type, data=data) # # This needs to mimic the envelope created by GCP PubSb when call a resource #