name: generate-docs-pages

on:
  push:
    branches:
      - main
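# The integration tests share warehouse schemas, so allow only one run at a time.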
concurrency: dbt_integration_tests
env:
  # Set profiles.yml directory
  DBT_PROFILES_DIR: ./ci
  # Redshift Connection
  # REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
  # REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
  # REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
  # REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
  # REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
  # BigQuery Connection
  # BIGQUERY_TEST_DATABASE: ${{ secrets.BIGQUERY_TEST_DATABASE }}
  # BIGQUERY_LOCATION: ${{ secrets.BIGQUERY_LOCATION }}
  # BIGQUERY_SERVICE_TYPE: ${{ secrets.BIGQUERY_SERVICE_TYPE }}
  # BIGQUERY_SERVICE_PROJECT_ID: ${{ secrets.BIGQUERY_SERVICE_PROJECT_ID }}
  # BIGQUERY_SERVICE_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_SERVICE_PRIVATE_KEY_ID }}
  # BIGQUERY_SERVICE_PRIVATE_KEY: ${{ secrets.BIGQUERY_SERVICE_PRIVATE_KEY }}
  # BIGQUERY_SERVICE_CLIENT_EMAIL: ${{ secrets.BIGQUERY_SERVICE_CLIENT_EMAIL }}
  # BIGQUERY_SERVICE_CLIENT_ID: ${{ secrets.BIGQUERY_SERVICE_CLIENT_ID }}
  # BIGQUERY_SERVICE_AUTH_URI: ${{ secrets.BIGQUERY_SERVICE_AUTH_URI }}
  # BIGQUERY_SERVICE_TOKEN_URI: ${{ secrets.BIGQUERY_SERVICE_TOKEN_URI }}
  # BIGQUERY_SERVICE_AUTH_PROVIDER_X509_CERT_URL: ${{ secrets.BIGQUERY_SERVICE_AUTH_PROVIDER_X509_CERT_URL }}
  # BIGQUERY_SERVICE_CLIENT_X509_CERT_URL: ${{ secrets.BIGQUERY_SERVICE_CLIENT_X509_CERT_URL }}
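  # NOTE: only the Snowflake connection below is active; the commented-out blocks are kept for reference.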
  # Snowflake Connection
  SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
  SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
  SNOWFLAKE_TEST_PASSWORD: ${{ secrets.SNOWFLAKE_TEST_PASSWORD }}
  SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
  SNOWFLAKE_TEST_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DATABASE }}
  SNOWFLAKE_TEST_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WAREHOUSE }}
  # Postgres Connection
  # POSTGRES_TEST_HOST: ${{ secrets.POSTGRES_TEST_HOST }}
  # POSTGRES_TEST_USER: ${{ secrets.POSTGRES_TEST_USER }}
  # POSTGRES_TEST_PASS: ${{ secrets.POSTGRES_TEST_PASS }}
  # POSTGRES_TEST_PORT: ${{ secrets.POSTGRES_TEST_PORT }}
  # POSTGRES_TEST_DBNAME: ${{ secrets.POSTGRES_TEST_DBNAME }}
  # Databricks Connection
  # DATABRICKS_TEST_HOST: ${{ secrets.DATABRICKS_TEST_HOST }}
  # DATABRICKS_TEST_HTTP_PATH: ${{ secrets.DATABRICKS_TEST_HTTP_PATH }}
  # DATABRICKS_TEST_TOKEN: ${{ secrets.DATABRICKS_TEST_TOKEN }}
  # DATABRICKS_TEST_ENDPOINT: ${{ secrets.DATABRICKS_TEST_ENDPOINT }}
jobs:
  gh_pages:
    runs-on: ubuntu-latest
    defaults:
      run:
        # Run every step from the integration_tests subdirectory
        working-directory: ./integration_tests
    strategy:
      matrix:
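        # A single version and warehouse today; extend these lists to fan the job out over more combinations.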
        dbt_version: ["1.6.*"]
        warehouse: ["snowflake"]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # full history; otherwise pushing refs to the destination repo fails
      # Strip the ".*" patch wildcard from DBT_VERSION, replace "." with "_", and store the result as SCHEMA_SUFFIX.
      # SCHEMA_SUFFIX lets multiple dbt versions run in parallel without overwriting each other's output tables.
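      # e.g. DBT_VERSION "1.6.*" becomes SCHEMA_SUFFIX "1_6"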
      - name: Set SCHEMA_SUFFIX env
        run: echo "SCHEMA_SUFFIX=$(echo ${DBT_VERSION%.*} | tr . _)" >> $GITHUB_ENV
        env:
          DBT_VERSION: ${{ matrix.dbt_version }}
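      # DEFAULT_TARGET is presumably read by the profiles.yml in ./ci to pick the active connection.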
      - name: Set DEFAULT_TARGET env
        run: |
          echo "DEFAULT_TARGET=${{ matrix.warehouse }}" >> $GITHUB_ENV
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
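      # jq is used later to filter integration-test entries out of the generated manifest.json.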
      - name: Install jq
        run: sudo apt-get update && sudo apt-get install -y jq
      - name: Python setup
        uses: actions/setup-python@v5
        with:
          python-version: "3.8.x"
      - name: Pip cache
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ matrix.dbt_version }}-${{ matrix.warehouse }}
          restore-keys: |
            ${{ runner.os }}-pip-${{ matrix.dbt_version }}-${{ matrix.warehouse }}
      # Install the latest patch version; upgrade if the cache contains an older patch.
      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -Iv "dbt-${{ matrix.warehouse }}==${{ matrix.dbt_version }}" --upgrade
          dbt deps
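      # Build the package against the test warehouse, generate the docs site, filter the manifest,
      # and stage the artifacts under ../docs for publishing.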
      - name: Create dbt docs
        run: |
          dbt run-operation post_ci_cleanup --target ${{ matrix.warehouse }}
          dbt seed --target ${{ matrix.warehouse }} --full-refresh
          dbt run --select snowplow_unified snowplow_unified_events_stg --target ${{ matrix.warehouse }} --full-refresh --vars '{snowplow__allow_refresh: true, snowplow__backfill_limit_days: 243, snowplow__enable_cwv: false, snowplow__view_passthroughs: [], snowplow__session_passthroughs: [], snowplow__user_first_passthroughs: [], snowplow__user_last_passthroughs: [], snowplow__view_aggregations: [], snowplow__session_aggregations: [], snowplow__user_aggregations: [], snowplow__conversion_events: []}'
          dbt docs generate --vars '{snowplow__atomic_schema: "${{ matrix.warehouse }}_snplw_unified_int_tests"}'
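          # Clear out the previously published artifacts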
          rm -f ../docs/catalog.json
          rm -f ../docs/manifest.json
          rm -f ../docs/run_results.json
          rm -f ../docs/index.html
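          # Drop every manifest entry whose key references the integration-tests project, so the published docs cover only the package itself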
          jq 'walk(if type == "object" then with_entries(select(.key | contains("snowplow_unified_integration_tests") | not)) else . end)' target/manifest.json > target/new_manifest.json
          cp target/catalog.json ../docs/catalog.json
          cp target/new_manifest.json ../docs/manifest.json
          cp target/run_results.json ../docs/run_results.json
          cp target/index.html ../docs/index.html
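          # Clean up the CI schemas again now that the docs artifacts are staged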
          dbt run-operation post_ci_cleanup --target ${{ matrix.warehouse }}
      - name: Commit & Push changes
        uses: EndBug/add-and-commit@v9
        with:
          add: ./docs/catalog.json ./docs/manifest.json ./docs/run_results.json ./docs/index.html
          # default_author determines how the action fills in the missing author name and email. Three options are available:
          # - github_actor -> UserName <[email protected]>
          # - user_info -> Your Display Name <[email protected]>
          # - github_actions -> github-actions <email associated with the GitHub logo>
          # Default: github_actor
          default_author: github_actions
          message: 'Update dbt docs'
          new_branch: gh_pages
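          # gh_pages is regenerated from scratch on every run, so the force push is intentional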
          push: 'origin gh_pages --force'