Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 7 additions & 4 deletions jobs/permanent/assets-registrations-staff/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,12 @@ ENV APP_ENV=${APP_ENV} \
PIP_DEFAULT_TIMEOUT=100 \
PIP_ROOT_USER_ACTION=ignore \
# poetry:
POETRY_VERSION=1.8.3 \
POETRY_VERSION=2.1.3 \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false \
POETRY_CACHE_DIR='/var/cache/pypoetry' \
POETRY_HOME='/usr/local'
POETRY_HOME='/usr/local' \
POETRY_INSTALLER_PARALLEL=false

SHELL ["/bin/bash", "-eo", "pipefail", "-c"]

Expand Down Expand Up @@ -66,6 +67,7 @@ RUN groupadd -g "${GID}" -r web \
COPY --chown=web:web ./poetry.lock ./pyproject.toml /code/

COPY --chown=web:web ./src /code/src
COPY ./run.sh /code
COPY --chown=web:web ./README.md /code

# Project initialization:
Expand All @@ -76,11 +78,12 @@ RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \
&& poetry run pip install -U pip \
&& poetry install \
$(if [ -z ${APP_ENV+x} ] | [ "$APP_ENV" = 'production' ]; then echo '--only main'; fi) \
--no-interaction --no-ansi
--no-interaction --no-ansi \
&& chmod 755 /code/run.sh

USER web

# Set Python path
ENV PYTHONPATH=/code/src

CMD [ "python", "-m", "assets_registrations_staff" ]
CMD [ "/code/run.sh" ]
2 changes: 1 addition & 1 deletion jobs/permanent/assets-registrations-staff/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ BC Registries Assets Prune/Purge Staff Registrations Service

## Technology Stack Used
* Python
* Postgres - psycopg2-binary
* Postgres
* GCP Artifact Registry
* GCP Cloud Run Jobs
* GCP Cloud Scheduler
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Google Cloud Deploy delivery pipeline for the PPR assets-registrations-staff
# Cloud Run job. One release is promoted in order through the dev, test,
# sandbox and prod targets; per-environment values (project id, job name,
# Cloud SQL instance, service account) are supplied via deployParameters
# and substituted into the rendered manifests.
apiVersion: deploy.cloud.google.com/v1
kind: DeliveryPipeline
metadata:
  name: ppr-assets-reg-staff-pipeline
  description: Deployment pipeline
serialPipeline:
  stages:
    # Stage 1: development — entry point of the promotion chain.
    - targetId: eogruh-dev
      profiles: [dev]
      strategy:
        standard:
          verify: false  # no post-deploy verify job in any stage
      deployParameters:
        - values:
            deploy-env: "development"
            deploy-project-id: "eogruh-dev"
            job-name: "ppr-assets-reg-staff-dev"
            run-command: "/code/run.sh"  # entry-point script baked into the image
            # NOTE(review): unlike test/sandbox/prod below, this stage sets no
            # app-env value — confirm the job's default APP_ENV is what dev needs.
            cloudsql-instances: "eogruh-dev:northamerica-northeast1:ppr-dev-cloudsql"
            service-account: "sa-job@eogruh-dev.iam.gserviceaccount.com"
    # Stage 2: test.
    - targetId: eogruh-test
      profiles: [test]
      strategy:
        standard:
          verify: false
      deployParameters:
        - values:
            deploy-env: "test"
            deploy-project-id: "eogruh-test"
            job-name: "ppr-assets-reg-staff-test"
            run-command: "/code/run.sh"
            app-env: "test"
            cloudsql-instances: "eogruh-test:northamerica-northeast1:ppr-test-cloudsql"
            service-account: "sa-job@eogruh-test.iam.gserviceaccount.com"
    # Stage 3: sandbox — note the Cloud SQL instance name follows a different
    # convention (ppr-sandbox-pgdb) than dev/test.
    - targetId: eogruh-sandbox
      profiles: [sandbox]
      strategy:
        standard:
          verify: false
      deployParameters:
        - values:
            deploy-env: "sandbox"
            deploy-project-id: "eogruh-sandbox"
            job-name: "ppr-assets-reg-staff-sandbox"
            run-command: "/code/run.sh"
            app-env: "sandbox"
            cloudsql-instances: "eogruh-sandbox:northamerica-northeast1:ppr-sandbox-pgdb"
            service-account: "sa-job@eogruh-sandbox.iam.gserviceaccount.com"
    # Stage 4: production — final promotion target.
    - targetId: eogruh-prod
      profiles: [prod]
      strategy:
        standard:
          verify: false
      deployParameters:
        - values:
            deploy-env: "production"
            deploy-project-id: "eogruh-prod"
            job-name: "ppr-assets-reg-staff-prod"
            run-command: "/code/run.sh"
            app-env: "production"
            cloudsql-instances: "eogruh-prod:northamerica-northeast1:ppr-prod"
            service-account: "sa-job@eogruh-prod.iam.gserviceaccount.com"
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
DATABASE_NAME="op://database/$APP_ENV/ppr-db-gcp/DATABASE_NAME"
DATABASE_USERNAME="op://database/$APP_ENV/ppr-db-gcp/JOB_DATABASE_USERNAME"
CLOUDSQL_INSTANCE_CONNECTION_NAME="op://database/$APP_ENV/ppr-db-gcp/DATABASE_INSTANCE_CONNECTION_NAME"
VPC_CONNECTOR="op://CD/$APP_ENV/ppr-assets-reg-staff/VPC_CONNECTOR"
1,390 changes: 1,309 additions & 81 deletions jobs/permanent/assets-registrations-staff/poetry.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions jobs/permanent/assets-registrations-staff/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,17 @@ email-validator= "1.3.1"
phonenumbers = "8.13.39"
notifications-python-client = "7.0.0"
beautifulsoup4 = "4.12.3"
pg8000 = "^1.31.2"
cachecontrol = "^0.14.0"
structured-logging = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/structured-logging", branch = "main" }
cloud-sql-connector = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/cloud-sql-connector", branch = "main" }
oauthlib = "^3.2.2"
grpcio-status = "^1.51.3"
pytz = "^2022.7.1"
datedelta = "^1.4"
PyYAML = "^6.0.2"


[tool.poetry.group.test.dependencies]
psycopg2-binary = "^2.9.9"
pytest = "^8.2.2"
pytest-cov = "^5.0.0"
pytest-mock = "^3.14.0"
Expand Down
4 changes: 4 additions & 0 deletions jobs/permanent/assets-registrations-staff/run.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Container entry point for the assets-registrations-staff Cloud Run job.
# Fail fast: abort on any command error, on use of an unset variable,
# and propagate failures through pipelines.
set -o errexit
set -o nounset
set -o pipefail

# exec replaces the shell with the Python process so that signals
# (e.g. SIGTERM from Cloud Run) reach the job directly.
exec python -m assets_registrations_staff
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
0 19 * * 5
Original file line number Diff line number Diff line change
Expand Up @@ -30,14 +30,20 @@ class Config(BaseConfig):

# service accounts

APP_DB_USER = os.getenv("APP_DATABASE_USERNAME", "")
APP_DB_PASSWORD = os.getenv("APP_DATABASE_PASSWORD", "")
APP_DB_NAME = os.getenv("APP_DATABASE_NAME", "")
APP_DB_HOST = os.getenv("APP_DATABASE_HOST", "")
APP_DB_PORT = os.getenv("APP_DATABASE_PORT", "5432")
APP_DB_USER = os.getenv("DATABASE_USERNAME", "")
APP_DB_PASSWORD = os.getenv("DATABASE_PASSWORD", "")
APP_DB_NAME = os.getenv("DATABASE_NAME", "")
APP_DB_HOST = os.getenv("DATABASE_HOST", "")
APP_DB_PORT = os.getenv("DATABASE_PORT", "5432")
# POSTGRESQL
# POSTGRESQL
if APP_DB_UNIX_SOCKET := os.getenv("APP_DATABASE_UNIX_SOCKET", None):
APP_DATABASE_URI = f"postgresql://{APP_DB_USER}:{APP_DB_PASSWORD}@/{APP_DB_NAME}?host={APP_DB_UNIX_SOCKET}"
else:
APP_DATABASE_URI = f"postgresql://{APP_DB_USER}:{APP_DB_PASSWORD}@{APP_DB_HOST}:{APP_DB_PORT}/{APP_DB_NAME}"
CLOUDSQL_INSTANCE_CONNECTION_NAME = os.getenv("CLOUDSQL_INSTANCE_CONNECTION_NAME", "")
DB_IP_TYPE = os.getenv("DATABASE_IP_TYPE", "private").lower()
APP_DATABASE_URI = ""

if CLOUDSQL_INSTANCE_CONNECTION_NAME:
SQLALCHEMY_DATABASE_URI = "postgresql+pg8000://"
if APP_DB_HOST:
APP_DATABASE_URI = f"postgresql+pg8000://{APP_DB_USER}:{APP_DB_PASSWORD}@{APP_DB_HOST}:{APP_DB_PORT}/{APP_DB_NAME}"
SQLALCHEMY_DATABASE_URI = (
f"postgresql+pg8000://{APP_DB_USER}:{APP_DB_PASSWORD}@{APP_DB_HOST}:{APP_DB_PORT}/{APP_DB_NAME}"
)
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,17 @@
"""This module executes all the job steps."""
import sys
from contextlib import suppress
from typing import Final
from typing import Any, Final

import psycopg2
from cloud_sql_connector import DBConfig, getconn
from pg8000 import dbapi as pg8000

from assets_registrations_staff.config import Config
from assets_registrations_staff.utils.logging import logger

DbConnection = Any
DbCursor = Any

COUNT_QUERY: Final = """
SELECT (select count(mer.id)
from mhr_extra_registrations mer
Expand Down Expand Up @@ -78,7 +82,7 @@
"""


def delete_mhr_other(db_conn: psycopg2.extensions.connection, db_cursor: psycopg2.extensions.cursor):
def delete_mhr_other(db_conn: DbConnection, db_cursor: DbCursor):
"""Remove stale registrations created by non-staff from the mhr_other_registrations table."""
try:
if not db_conn or not db_cursor:
Expand All @@ -87,13 +91,13 @@ def delete_mhr_other(db_conn: psycopg2.extensions.connection, db_cursor: psycopg
db_cursor.execute(sql_statement)
db_conn.commit()
logger.info("Delete stale other account registrations successful.")
except (psycopg2.Error, Exception) as err:
except Exception as err:
logger.error(f"Delete stale other account registrations failed: {err}.")


def remove_mhr_registration(
db_conn: psycopg2.extensions.connection,
db_cursor: psycopg2.extensions.cursor,
db_conn: DbConnection,
db_cursor: DbCursor,
mhr_num: str,
):
"""Create a record for a staff created new MH registration to be removed from the staff table as stale."""
Expand All @@ -103,13 +107,13 @@ def remove_mhr_registration(
sql_statement = INSERT_EXTRA_REG.format(mhr_number=mhr_num)
db_cursor.execute(sql_statement)
db_conn.commit()
except (psycopg2.Error, Exception) as err:
except Exception as err:
error_message = f"Error attempting mhr_extra_registrations insert MHR#={mhr_num}: {err}"
logger.error(error_message)


def remove_mhr_staff_reg(
db_conn: psycopg2.extensions.connection, db_cursor: psycopg2.extensions.cursor, count_data: dict
db_conn: DbConnection, db_cursor: DbCursor, count_data: dict
):
"""Revert MHR drafts in a payment pending state to the regular draft state."""
try:
Expand All @@ -123,12 +127,12 @@ def remove_mhr_staff_reg(
for mhr_num in mhr_numbers:
remove_mhr_registration(db_conn, db_cursor, mhr_num)
logger.info(f"Remove staff MHR registrations completed for MHR numbers {staff_mhr_nums}")
except (psycopg2.Error, Exception) as err:
except Exception as err:
error_message = f"Remove staff MHR registrations failed: {err}"
logger.error(error_message)


def run_count_query(db_conn: psycopg2.extensions.connection, db_cursor: psycopg2.extensions.cursor) -> dict:
def run_count_query(db_conn: DbConnection, db_cursor: DbCursor) -> dict:
"""Execute the count staff registrations query."""
count_data: dict = {}
try:
Expand All @@ -140,19 +144,36 @@ def run_count_query(db_conn: psycopg2.extensions.connection, db_cursor: psycopg2
count_data["staff_reg_count"] = int(row[1])
count_data["staff_reg_nums"] = str(row[2]) if row[2] else ""
logger.info(f"Count query results: {str(count_data)}")
except (psycopg2.Error, Exception) as err:
except Exception as err:
error_message = f"Error attempting to run status query: {err}"
logger.error(error_message)
return count_data


def job(config: Config):
"""Execute the job."""
db_conn: psycopg2.extensions.connection
db_cursor: psycopg2.extensions.cursor
db_conn: DbConnection | None = None
db_cursor: DbCursor | None = None
try:
logger.info("Getting database connection and cursor.")
db_conn = psycopg2.connect(dsn=config.APP_DATABASE_URI)
if config.CLOUDSQL_INSTANCE_CONNECTION_NAME: # pragma: no cover
db_config = DBConfig(
instance_name=config.CLOUDSQL_INSTANCE_CONNECTION_NAME,
database=config.APP_DB_NAME,
user=config.APP_DB_USER,
ip_type=config.DB_IP_TYPE,
pool_recycle=60,
schema="public",
)
db_conn = getconn(db_config)
else:
db_conn = pg8000.connect(
user=config.APP_DB_USER,
password=config.APP_DB_PASSWORD,
host=config.APP_DB_HOST,
port=int(config.APP_DB_PORT),
database=config.APP_DB_NAME,
)
db_cursor = db_conn.cursor()
count_data = run_count_query(db_conn, db_cursor)
if not count_data.get("other_reg_count") or count_data.get("other_reg_count") < 1:
Expand All @@ -161,13 +182,15 @@ def job(config: Config):
delete_mhr_other(db_conn, db_cursor)
remove_mhr_staff_reg(db_conn, db_cursor, count_data)
logger.info("Run completed.")
except (psycopg2.Error, Exception) as err:
except Exception as err:
job_message: str = f"Run failed: {str(err)}."
logger.error(job_message)
sys.exit(1) # Retry Job Task by exiting the process
finally:
# Clean up: Close the database cursor and connection
with suppress(Exception):
db_cursor.close()
with suppress(Exception):
db_conn.close()
if db_cursor:
with suppress(Exception):
db_cursor.close()
if db_conn:
with suppress(Exception):
db_conn.close()
Loading