# What?
#
# Run tests and edit metadata as needed to tag and release an arbitrary ref.
# Uploads to an internal archive for further processing.
#
# How?
#
# After checking out and testing the provided ref, the image is built and uploaded.
# Metadata is altered with stream processing commands.
#
# When?
#
# Called by upstream internal-release.yml workflows that live in adapter repos.
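#
# A calling workflow in an adapter repo looks roughly like this (illustrative
# sketch only; the exact repo path, ref, and secrets wiring may differ):
#
#   jobs:
#     internal-archive-release:
#       uses: <org>/<repo>/.github/workflows/internal-archive-release.yml@main
#       with:
#         dbms_name: "postgres"
#         package_test_command: "python -c 'import dbt.adapters.postgres'"
#       secrets: inherit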
name: "Internal Archive Release"
run-name: "Cloud releasing ${{ inputs.dbms_name }} off of ${{ inputs.ref }}"
on:
workflow_call:
inputs:
dbms_name:
description: "The warehouse name for the adapter."
type: string
required: true
package_test_command:
description: "Package test command"
type: string
required: true
ref:
description: "The ref to use (default to main)"
type: string
default: "main"
required: false
org:
description: "The organization that maintains the adapter"
type: string
default: "dbt-labs"
required: false # only needed by third party workflows
skip_tests:
description: "Should the tests be skipped? (default to false)"
type: boolean
required: false
default: false
permissions: read-all
# cancels any in-progress run triggered by the same event for the same ref (for PRs) or the same SHA (otherwise)
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.ref }}
cancel-in-progress: true
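# e.g. (illustrative) a pull_request-triggered caller on branch feature/x lands in a group
# like "Internal Archive Release-pull_request-feature/x", so a force-push cancels the stale run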
defaults:
run:
shell: bash
env:
PYTHON_TARGET_VERSION: 3.8
NOTIFICATION_PREFIX: "[Internal Archive Release]"
TEMP_PROFILE_NAME: "temp_aws_profile"
HATCH_ADAPTERS: (postgres) # Must be a |-delimited list wrapped in parens
ADAPTERS_THAT_SKIP_TESTS: (spark|trino|databricks)
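# e.g. (illustrative) with HATCH_ADAPTERS set to (postgres|redshift), a dbms_name of
# "redshift" matches the pattern below and is routed to the hatch jobs instead of the tox jobs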
jobs:
initial-setup:
runs-on: ubuntu-latest
outputs:
is_hatch_adapter: ${{ steps.check_if_adapter_builds_with_hatch.outputs.is_hatch_adapter }}
skip_tests: ${{ steps.check_if_this_build_can_skip_tests.outputs.skip_tests }}
steps:
- name: "Check if dbms_name is contained in HATCH_ADAPTERS"
id: "check_if_adapter_builds_with_hatch"
run: |
echo "HATCH_ADAPTERS='${{ env.HATCH_ADAPTERS }}'" >> $GITHUB_ENV
# note regex arg not commented to avoid unintended Bash quoting effects
if [[ "${{ inputs.dbms_name }}" =~ ${{ env.HATCH_ADAPTERS }} ]]; then
echo "is_hatch_adapter=true" >> $GITHUB_OUTPUT
else
echo "is_hatch_adapter=false" >> $GITHUB_OUTPUT
fi
- name: "Check if dbms can skip tests"
id: "check_if_this_build_can_skip_tests"
run: |
# again, regexes are unquoted to avoid quoting side effects
if [[ "${{ inputs.dbms_name }}" =~ ${{ env.ADAPTERS_THAT_SKIP_TESTS }} ]]; then
echo "skip_tests=true" >> $GITHUB_OUTPUT
elif [[ "${{ inputs.skip_tests }}" == "true" ]]; then
echo "skip_tests=true" >> $GITHUB_OUTPUT
else
echo "skip_tests=false" >> $GITHUB_OUTPUT
fi
job-setup:
name: Job Setup
runs-on: ubuntu-latest
needs: [initial-setup]
steps:
- name: "[DEBUG] Print Variables"
run: |
echo Warehouse name: ${{ inputs.dbms_name }}
echo The release ref: ${{ inputs.ref }}
echo Package test command: ${{ inputs.package_test_command }}
- name: "Checkout provided ref, default to branch main"
uses: actions/checkout@v4
with:
repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
ref: "${{ inputs.ref }}"
run-unit-tests-tox:
name: 'Unit Tests (Tox)'
runs-on: ubuntu-latest
needs: [initial-setup, job-setup]
if: |
needs.initial-setup.outputs.is_hatch_adapter == 'false' &&
needs.initial-setup.outputs.skip_tests == 'false'
env:
TOXENV: unit
steps:
- name: "Checkout provided ref, default to branch main"
uses: actions/checkout@v4
with:
repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
ref: "${{ inputs.ref }}"
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_TARGET_VERSION }}
- name: "Install Python Dependencies"
run: |
python -m pip install --user --upgrade pip
python -m pip install tox
python -m pip --version
python -m tox --version
- name: "Run Tests using tox"
run: tox
run-unit-tests-hatch:
name: 'Unit Tests (Hatch)'
runs-on: ubuntu-latest
needs: [initial-setup, job-setup]
if: |
needs.initial-setup.outputs.is_hatch_adapter == 'true' &&
needs.initial-setup.outputs.skip_tests == 'false'
steps:
- name: "Checkout provided ref, default to branch main"
uses: actions/checkout@v4
with:
repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
ref: "${{ inputs.ref }}"
- name: "Setup `hatch`"
uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
- name: "Run Tests using hatch"
run: hatch run unit-tests:all
####################
# Integration Tests
####################
run-integration-tests-tox:
name: 'Integration Tests (Tox)'
runs-on: ubuntu-latest
needs: [initial-setup, job-setup, run-unit-tests-tox]
if: |
needs.initial-setup.outputs.is_hatch_adapter == 'false' &&
needs.initial-setup.outputs.skip_tests == 'false'
env:
TOXENV: integration
steps:
- name: "Checkout provided ref, default to branch main"
uses: actions/checkout@v4
with:
repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
ref: "${{ inputs.ref }}"
- name: "Setup Environment Variables - ./scripts/env-setup.sh"
run: |
if [ -f './scripts/env-setup.sh' ]; then
source ./scripts/env-setup.sh
fi
- name: "Setup Environment Variables - Secrets Context"
uses: actions/github-script@v7
id: check-env
with:
result-encoding: string
script: |
try {
const { SECRETS_CONTEXT, INTEGRATION_TESTS_SECRETS_PREFIX } = process.env
const secrets = JSON.parse(SECRETS_CONTEXT)
if (INTEGRATION_TESTS_SECRETS_PREFIX) {
for (const [key, value] of Object.entries(secrets)) {
if (key.startsWith(INTEGRATION_TESTS_SECRETS_PREFIX)) {
core.exportVariable(key, value)
}
}
} else {
core.info("The INTEGRATION_TESTS_SECRETS_PREFIX env variable is empty or not defined, skipping the secrets setup.")
}
} catch (err) {
core.error("Error while reading or parsing the JSON")
core.setFailed(err)
}
env:
SECRETS_CONTEXT: ${{ toJson(secrets) }}
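# Example with hypothetical names: if env-setup.sh exports
# INTEGRATION_TESTS_SECRETS_PREFIX=POSTGRES_TEST, a secret named POSTGRES_TEST_HOST
# gets exported for the test run below, while all other secrets are left untouched.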
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_TARGET_VERSION }}
- name: "Install Python Dependencies"
run: |
python -m pip install --user --upgrade pip
python -m pip --version
python -m pip install tox
tox --version
- name: "Run Tests using tox"
run: tox
run-integration-tests-hatch:
name: 'Integration Tests (Hatch)'
needs: [initial-setup, job-setup, run-unit-tests-hatch]
if: |
needs.initial-setup.outputs.is_hatch_adapter == 'true' &&
needs.initial-setup.outputs.skip_tests == 'false'
uses: "dbt-labs/dbt-postgres/.github/workflows/integration-tests.yml@main"
with:
core_branch: "main"
dbt_adapters_branch: "main"
####################
# Artifact Handling
####################
create-internal-release:
name: Create release for internal archive
runs-on: ubuntu-latest
needs: [initial-setup, job-setup, run-integration-tests-tox, run-integration-tests-hatch]
# Build the artifact if
# 1. setup jobs succeeded, and either
# 2a. tests could be skipped, or
# 2b. one of the integration test jobs passed (each only runs when its unit tests pass)
# always() is required so this job is still evaluated when the other test path was skipped
if: |
always() &&
needs.job-setup.result == 'success' &&
(
needs.initial-setup.outputs.skip_tests == 'true' ||
(needs.run-integration-tests-tox.result == 'success' || needs.run-integration-tests-hatch.result == 'success')
)
steps:
- name: "Checkout provided ref, default to branch main"
uses: actions/checkout@v4
with:
repository: "${{ inputs.org }}/dbt-${{ inputs.dbms_name }}"
ref: "${{ inputs.ref }}"
- name: "Set up Python - ${{ env.PYTHON_TARGET_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_TARGET_VERSION }}
- name: "Install Python Dependencies"
run: |
python -m pip install --user --upgrade pip
python -m pip install --upgrade setuptools wheel twine check-wheel-contents
python -m pip --version
- name: "Authenticate with AWS"
uses: aws-actions/configure-aws-credentials@v4
id: aws-signin
with:
role-to-assume: "arn:aws:iam::" # TODO
aws-region: ${{ secrets.AWS_REGION }}
role-session-name: gha-internal-release-secrets-manager-${{ github.run_id }}
role-duration-seconds: 900 # 15 minutes
output-credentials: true
- name: "Configure AWS profile for upload"
run: |
aws configure set aws_access_key_id ${{ steps.aws-signin.outputs.aws-access-key-id }} --profile ${{ env.TEMP_PROFILE_NAME }}
aws configure set aws_secret_access_key ${{ steps.aws-signin.outputs.aws-secret-access-key }} --profile ${{ env.TEMP_PROFILE_NAME }}
aws configure set region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }}
aws configure set output text --profile ${{ env.TEMP_PROFILE_NAME }}
aws codeartifact login --tool twine --repository ${{ secrets.AWS_REPOSITORY }} \
--domain ${{ secrets.AWS_DOMAIN }} --domain-owner ${{ secrets.AWS_DOMAIN_OWNER }} \
--region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }}
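# `aws codeartifact login --tool twine` stores the repository endpoint and a
# short-lived auth token in ~/.pypirc under the name "codeartifact"; the final
# `twine upload --repository codeartifact` step depends on that entry.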
- name: "Get version in package and versions published to internal pypi"
run: |
versions_published="$(aws codeartifact list-package-versions --repository ${{ secrets.AWS_REPOSITORY }} \
--domain ${{ secrets.AWS_DOMAIN }} --domain-owner ${{ secrets.AWS_DOMAIN_OWNER }} \
--region ${{ secrets.AWS_REGION }} --profile ${{ env.TEMP_PROFILE_NAME }} --format pypi \
--package dbt-${{ inputs.dbms_name }} --output json \
--query 'versions[*].version' | jq -r '.[]' | grep "^${{ inputs.version_number }}" || true )" # suppress pipefail only here
version_file="dbt/adapters/${{ inputs.dbms_name }}/__version__.py"
version_in_file=$(grep -E 'version(: str)? =' "${version_file}" | cut -d '"' -f2)
echo "[Debug] version_in_file: ${version_in_file}"
echo "CURRENT_PKG_VERSION=${version_in_file}" >> "$GITHUB_ENV"
echo "VERSIONS_PUBLISHED=$(echo "${versions_published[*]}"| tr '\n' ',')" >> "$GITHUB_ENV"
echo "VERSION_FILE=${version_file}" >> "$GITHUB_ENV"
- name: "Determine next Cloud release version"
id: get_next_cloud_release_version
uses: "dbt-labs/dbt-release/.github/actions/next-cloud-release-version@main"
with:
version_number: ${{ env.CURRENT_PKG_VERSION }}
versions_published: ${{ env.VERSIONS_PUBLISHED }}
- name: "Update version in package and setup.py"
run: |
cloud_release_version="${{ steps.get_next_cloud_release_version.outputs.internal_release_version }}"
commit_sha="$(git rev-parse HEAD)"
setup_file="./setup.py"
echo ">>> Altering ${version_file}"
# Ensure a build+xxx where xxx is an integer is always present in versioning
# sed may be a no-op -- this is fine!
v="${cloud_release_version}+${commit_sha}"
tee <<< "version = \"${v}\"" "${VERSION_FILE}"
if [ -f "${setup_file}" ]; then
sed -i "s/^package_version = .*$/package_version = \"${v}\"/" "${setup_file}"
fi
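# Example with illustrative values: a resolved release version of 1.8.2 and a
# HEAD sha of 0fc9a2b1 rewrite the version file to: version = "1.8.2+0fc9a2b1"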
################
# Build package
################
#
# 1. Build with setup.py
#
- name: "Build Distributions - scripts/build-dist.sh"
if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'false' }}"
run: |
if [ -f scripts/build-dist.sh ]; then
scripts/build-dist.sh
else
# Fall back to the basic setup.py command
python setup.py sdist bdist_wheel
fi
#
# 2. Build with Hatch
#
- name: "Setup `hatch`"
if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'true' }}"
uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main
- name: "Build Distributions - hatch"
if: "${{ needs.initial-setup.outputs.is_hatch_adapter == 'true' }}"
run: hatch build
###################
# Check and publish
###################
- name: "[DEBUG] Show Distributions"
run: ls -lh dist/
- name: "Check Distribution Descriptions"
run: |
version_file="dbt/adapters/__version__.py"
if [[ "${{ needs.initial-setup.outputs.is_hatch_adapter }}" == 'true' ]]; then
hatch run build:check-all
else
twine check dist/*
fi
- name: "[DEBUG] Check Wheel Contents"
run: |
check-wheel-contents dist/*.whl --ignore W007,W008
- name: "Upload Build Artifact - ${{ steps.get_next_internal_release_version.outputs.internal_release_version }}"
run: |
twine upload --repository codeartifact dist/*
version_file="$(echo "dbt/adapters/${{ inputs.dbms_name }}/__version__.py")"
version="$(grep 'version =' "${version_file}" | cut -d '"' -f2)"
message="-- Success -- released ${version}"
echo "::notice $NOTIFICATION_PREFIX::$message"