Skip to content

Commit e2071b1

Browse files
committed
harden testing workflows
- env-var-ize all ${{ }} expressions inside `run:` blocks across the test-warehouse, test-all-warehouses, test-all-warehouses-dbt-pre-releases, and cleanup-stale-schemas workflows
- validate inputs.max-age-hours (fail-closed on non-integer)
- SHA-pin pmeier/pytest-results-action@v0.8.0
- deny GITHUB_TOKEN by default, grant minimum per job

Made-with: Cursor
1 parent 04d8fde commit e2071b1

4 files changed

Lines changed: 88 additions & 35 deletions

File tree

.github/workflows/cleanup-stale-schemas.yml

Lines changed: 25 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,19 @@ on:
1212
default: "24"
1313
description: Drop schemas older than this many hours
1414

15+
permissions: {}
16+
1517
env:
1618
TESTS_DIR: ${{ github.workspace }}/dbt-data-reliability/integration_tests
1719

1820
jobs:
1921
cleanup:
2022
runs-on: ubuntu-latest
23+
permissions:
24+
contents: read
25+
env:
26+
WAREHOUSE: ${{ matrix.warehouse-type }}
27+
MAX_AGE_HOURS: ${{ inputs.max-age-hours || '24' }}
2128
strategy:
2229
fail-fast: false
2330
matrix:
@@ -28,6 +35,14 @@ jobs:
2835
- databricks_catalog
2936
- athena
3037
steps:
38+
- name: Validate max-age-hours input
39+
# Fail-closed on non-integer input before it reaches dbt run-operation.
40+
run: |
41+
if ! [[ "$MAX_AGE_HOURS" =~ ^[0-9]+$ ]]; then
42+
echo "::error::Invalid max-age-hours: '$MAX_AGE_HOURS' (must be a non-negative integer)"
43+
exit 1
44+
fi
45+
3146
- name: Checkout dbt package
3247
uses: actions/checkout@v6
3348
with:
@@ -40,19 +55,18 @@ jobs:
4055
cache: "pip"
4156

4257
- name: Install dbt
43-
run: >
44-
pip install
45-
"dbt-core"
46-
"dbt-${{ (matrix.warehouse-type == 'databricks_catalog' && 'databricks') || (matrix.warehouse-type == 'athena' && 'athena-community') || matrix.warehouse-type }}"
58+
env:
59+
DBT_ADAPTER_PKG: ${{ (matrix.warehouse-type == 'databricks_catalog' && 'databricks') || (matrix.warehouse-type == 'athena' && 'athena-community') || matrix.warehouse-type }}
60+
run: pip install "dbt-core" "dbt-${DBT_ADAPTER_PKG}"
4761

4862
- name: Write dbt profiles
4963
env:
5064
CI_WAREHOUSE_SECRETS: ${{ secrets.CI_WAREHOUSE_SECRETS || '' }}
5165
run: |
5266
# The cleanup job doesn't create schemas, but generate_profiles.py
5367
# requires --schema-name. Use a dummy value.
54-
python "${{ github.workspace }}/dbt-data-reliability/integration_tests/profiles/generate_profiles.py" \
55-
--template "${{ github.workspace }}/dbt-data-reliability/integration_tests/profiles/profiles.yml.j2" \
68+
python "$GITHUB_WORKSPACE/dbt-data-reliability/integration_tests/profiles/generate_profiles.py" \
69+
--template "$GITHUB_WORKSPACE/dbt-data-reliability/integration_tests/profiles/profiles.yml.j2" \
5670
--output ~/.dbt/profiles.yml \
5771
--schema-name "cleanup_placeholder"
5872
@@ -61,13 +75,13 @@ jobs:
6175
run: dbt deps
6276

6377
- name: Symlink local elementary package
64-
run: ln -sfn ${{ github.workspace }}/dbt-data-reliability ${{ env.TESTS_DIR }}/dbt_project/dbt_packages/elementary
78+
run: ln -sfn "$GITHUB_WORKSPACE/dbt-data-reliability" "$TESTS_DIR/dbt_project/dbt_packages/elementary"
6579

6680
- name: Drop stale CI schemas
6781
working-directory: ${{ env.TESTS_DIR }}/dbt_project
6882
# Only dbt_ prefixed schemas are created in this repo's CI.
6983
# The elementary repo has its own workflow for py_ prefixed schemas.
70-
run: >
71-
dbt run-operation drop_stale_ci_schemas
72-
--args '{prefixes: ["dbt_"], max_age_hours: ${{ inputs.max-age-hours || '24' }}}'
73-
-t "${{ matrix.warehouse-type }}"
84+
run: |
85+
dbt run-operation drop_stale_ci_schemas \
86+
--args "{prefixes: [\"dbt_\"], max_age_hours: ${MAX_AGE_HOURS}}" \
87+
-t "$WAREHOUSE"

.github/workflows/test-all-warehouses-dbt-pre-releases.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,12 @@ name: Test all warehouse platforms on dbt pre-releases
22
on:
33
workflow_dispatch:
44

5+
permissions: {}
6+
57
jobs:
68
test:
9+
permissions:
10+
contents: read
711
uses: ./.github/workflows/test-all-warehouses.yml
812
secrets: inherit
913
with:

.github/workflows/test-all-warehouses.yml

Lines changed: 18 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,8 @@ on:
3434
type: string
3535
required: false
3636

37+
permissions: {}
38+
3739
jobs:
3840
# ── Local targets ─────────────────────────────────────────────────────
3941
# No secrets needed — run on pull_request (works for forks without approval).
@@ -42,6 +44,8 @@ jobs:
4244
# fully in-process adapters (duckdb).
4345
test-local:
4446
if: github.event_name != 'pull_request_target'
47+
permissions:
48+
contents: read
4549
strategy:
4650
fail-fast: false
4751
matrix:
@@ -88,35 +92,41 @@ jobs:
8892
# Determine if this is a fork PR and skip if wrong trigger is used
8993
check-fork-status:
9094
runs-on: ubuntu-latest
95+
permissions: {}
9196
outputs:
9297
is_fork: ${{ steps.check.outputs.is_fork }}
9398
should_skip: ${{ steps.check.outputs.should_skip }}
9499
steps:
95100
- name: Check if PR is from fork
96101
id: check
102+
env:
103+
EVENT_NAME: ${{ github.event_name }}
104+
PR_REPO: ${{ github.event.pull_request.head.repo.full_name }}
105+
BASE_REPO: ${{ github.repository }}
97106
run: |
98107
IS_FORK="false"
99108
SHOULD_SKIP="false"
100109
101-
if [[ "${{ github.event_name }}" == "pull_request" || "${{ github.event_name }}" == "pull_request_target" ]]; then
102-
if [[ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]]; then
110+
if [[ "$EVENT_NAME" == "pull_request" || "$EVENT_NAME" == "pull_request_target" ]]; then
111+
if [[ "$PR_REPO" != "$BASE_REPO" ]]; then
103112
IS_FORK="true"
104113
fi
105114
106115
# Skip if: pull_request from fork (should use pull_request_target) OR pull_request_target from non-fork (should use pull_request)
107-
if [[ "${{ github.event_name }}" == "pull_request" && "$IS_FORK" == "true" ]]; then
116+
if [[ "$EVENT_NAME" == "pull_request" && "$IS_FORK" == "true" ]]; then
108117
SHOULD_SKIP="true"
109-
elif [[ "${{ github.event_name }}" == "pull_request_target" && "$IS_FORK" == "false" ]]; then
118+
elif [[ "$EVENT_NAME" == "pull_request_target" && "$IS_FORK" == "false" ]]; then
110119
SHOULD_SKIP="true"
111120
fi
112121
fi
113122
114-
echo "is_fork=$IS_FORK" >> $GITHUB_OUTPUT
115-
echo "should_skip=$SHOULD_SKIP" >> $GITHUB_OUTPUT
123+
echo "is_fork=$IS_FORK" >> "$GITHUB_OUTPUT"
124+
echo "should_skip=$SHOULD_SKIP" >> "$GITHUB_OUTPUT"
116125
117126
# Approval gate for fork PRs (only runs once for all platforms)
118127
approve-fork:
119128
runs-on: ubuntu-latest
129+
permissions: {}
120130
needs: [check-fork-status]
121131
if: needs.check-fork-status.outputs.should_skip != 'true' && needs.check-fork-status.outputs.is_fork == 'true'
122132
environment: elementary_test_env
@@ -126,6 +136,8 @@ jobs:
126136

127137
test-cloud:
128138
needs: [check-fork-status, approve-fork]
139+
permissions:
140+
contents: read
129141
if: |
130142
! cancelled() &&
131143
needs.check-fork-status.result == 'success' &&

.github/workflows/test-warehouse.yml

Lines changed: 41 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@ on:
5151
default: "latest_official"
5252
required: false
5353

54+
permissions: {}
55+
5456
env:
5557
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
5658
TESTS_DIR: ${{ github.workspace }}/dbt-data-reliability/integration_tests
@@ -59,6 +61,11 @@ jobs:
5961
test:
6062
runs-on: ubuntu-latest
6163
timeout-minutes: 60
64+
permissions:
65+
contents: read
66+
env:
67+
WAREHOUSE: ${{ inputs.warehouse-type }}
68+
DBT_VERSION: ${{ inputs.dbt-version }}
6269
concurrency:
6370
# Serialises runs for the same warehouse × dbt-version × branch.
6471
# The schema name is derived from a hash of this group (see "Write dbt profiles").
@@ -160,37 +167,43 @@ jobs:
160167
161168
- name: Install dbt-vertica
162169
if: inputs.warehouse-type == 'vertica' && inputs.dbt-version != 'fusion'
170+
env:
171+
DBT_CORE_PIN: ${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}
163172
run: |
164173
# dbt-vertica pins dbt-core~=1.8 which lacks native support for the
165174
# "arguments" test property used by the integration-test framework.
166175
# Install dbt-vertica without deps, then install the requested
167176
# dbt-core version separately (dbt-vertica works fine with newer
168177
# dbt-core versions).
169178
pip install dbt-vertica --no-deps
170-
pip install vertica-python \
171-
"dbt-core${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}"
179+
pip install vertica-python "dbt-core${DBT_CORE_PIN}"
172180
173181
- name: Install dbt
174182
if: ${{ inputs.dbt-version != 'fusion' && inputs.warehouse-type != 'vertica' }}
175-
run:
176-
pip install${{ (inputs.dbt-version == 'latest_pre' && ' --pre') || '' }}
177-
"dbt-core${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}"
178-
"dbt-${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'spark' && 'spark[PyHive]') || (inputs.warehouse-type == 'athena' && 'athena-community') || inputs.warehouse-type }}${{ (!startsWith(inputs.dbt-version, 'latest') && format('~={0}', inputs.dbt-version)) || '' }}"
183+
env:
184+
PIP_PRE_FLAG: ${{ (inputs.dbt-version == 'latest_pre' && '--pre') || '' }}
185+
DBT_CORE_PIN: ${{ (!startsWith(inputs.dbt-version, 'latest') && format('=={0}', inputs.dbt-version)) || '' }}
186+
DBT_ADAPTER_PKG: ${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || (inputs.warehouse-type == 'spark' && 'spark[PyHive]') || (inputs.warehouse-type == 'athena' && 'athena-community') || inputs.warehouse-type }}
187+
DBT_ADAPTER_PIN: ${{ (!startsWith(inputs.dbt-version, 'latest') && format('~={0}', inputs.dbt-version)) || '' }}
188+
run: |
189+
pip install $PIP_PRE_FLAG "dbt-core${DBT_CORE_PIN}" "dbt-${DBT_ADAPTER_PKG}${DBT_ADAPTER_PIN}"
179190
180191
- name: Install dbt-fusion
181192
if: inputs.dbt-version == 'fusion'
182193
run: |
183194
curl -fsSL https://public.cdn.getdbt.com/fs/install/install.sh | sh -s --
184195
185196
- name: Install Elementary
197+
env:
198+
ELEMENTARY_EXTRA: ${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || inputs.warehouse-type }}
186199
run: |
187200
# For Vertica, dbt-vertica is already installed with --no-deps above;
188201
# using ".[vertica]" would re-resolve dbt-vertica's deps and downgrade
189202
# dbt-core to ~=1.8. Install elementary without the adapter extra.
190-
if [ "${{ inputs.warehouse-type }}" = "vertica" ]; then
203+
if [ "$WAREHOUSE" = "vertica" ]; then
191204
pip install "./elementary"
192205
else
193-
pip install "./elementary[${{ (inputs.warehouse-type == 'databricks_catalog' && 'databricks') || inputs.warehouse-type }}]"
206+
pip install "./elementary[${ELEMENTARY_EXTRA}]"
194207
fi
195208
196209
- name: Write dbt profiles
@@ -205,24 +218,25 @@ jobs:
205218
# Budget (PostgreSQL 63-char limit):
206219
# dbt_(4) + timestamp(13) + _(1) + branch(≤18) + _(1) + hash(8) = 45
207220
# + _elementary(11) + _gw7(4) = 60
208-
CONCURRENCY_GROUP="tests_${{ inputs.warehouse-type }}_dbt_${{ inputs.dbt-version }}_${BRANCH_NAME}"
221+
CONCURRENCY_GROUP="tests_${WAREHOUSE}_dbt_${DBT_VERSION}_${BRANCH_NAME}"
209222
SHORT_HASH=$(echo -n "$CONCURRENCY_GROUP" | sha256sum | head -c 8)
210223
SAFE_BRANCH=$(echo "${BRANCH_NAME}" | awk '{print tolower($0)}' | sed "s/[^a-z0-9]/_/g; s/__*/_/g" | head -c 18)
211224
DATE_STAMP=$(date -u +%y%m%d_%H%M%S)
212225
SCHEMA_NAME="dbt_${DATE_STAMP}_${SAFE_BRANCH}_${SHORT_HASH}"
213226
214227
echo "Schema name: $SCHEMA_NAME (branch='${BRANCH_NAME}', timestamp=${DATE_STAMP}, hash of concurrency group)"
215228
216-
python "${{ github.workspace }}/dbt-data-reliability/integration_tests/profiles/generate_profiles.py" \
217-
--template "${{ github.workspace }}/dbt-data-reliability/integration_tests/profiles/profiles.yml.j2" \
229+
python "$GITHUB_WORKSPACE/dbt-data-reliability/integration_tests/profiles/generate_profiles.py" \
230+
--template "$GITHUB_WORKSPACE/dbt-data-reliability/integration_tests/profiles/profiles.yml.j2" \
218231
--output ~/.dbt/profiles.yml \
219232
--schema-name "$SCHEMA_NAME"
220233
221234
- name: Install dependencies
222235
working-directory: ${{ env.TESTS_DIR }}
223236
run: |
224-
${{ (inputs.dbt-version == 'fusion' && '~/.local/bin/dbt') || 'dbt' }} deps --project-dir dbt_project
225-
ln -sfn ${{ github.workspace }}/dbt-data-reliability dbt_project/dbt_packages/elementary
237+
if [ "$DBT_VERSION" = "fusion" ]; then DBT_BIN="$HOME/.local/bin/dbt"; else DBT_BIN="dbt"; fi
238+
"$DBT_BIN" deps --project-dir dbt_project
239+
ln -sfn "$GITHUB_WORKSPACE/dbt-data-reliability" dbt_project/dbt_packages/elementary
226240
pip install -r requirements.txt
227241
228242
- name: Start Vertica
@@ -240,15 +254,23 @@ jobs:
240254
- name: Check DWH connection
241255
working-directory: ${{ env.TESTS_DIR }}
242256
run: |
243-
${{ (inputs.dbt-version == 'fusion' && '~/.local/bin/dbt') || 'dbt' }} debug -t "${{ inputs.warehouse-type }}"
257+
if [ "$DBT_VERSION" = "fusion" ]; then DBT_BIN="$HOME/.local/bin/dbt"; else DBT_BIN="dbt"; fi
258+
"$DBT_BIN" debug -t "$WAREHOUSE"
244259
245260
- name: Test
246261
working-directory: "${{ env.TESTS_DIR }}/tests"
247-
run: py.test -n${{ (inputs.warehouse-type == 'spark' && '4') || '8' }} -vvv --target "${{ inputs.warehouse-type }}" --junit-xml=test-results.xml --html=detailed_report_${{ inputs.warehouse-type }}_dbt_${{ inputs.dbt-version }}.html --self-contained-html --clear-on-end ${{ (inputs.dbt-version == 'fusion' && '--runner-method fusion') || '' }}
262+
env:
263+
PYTEST_PARALLEL: ${{ (inputs.warehouse-type == 'spark' && '4') || '8' }}
264+
FUSION_RUNNER_FLAG: ${{ (inputs.dbt-version == 'fusion' && '--runner-method fusion') || '' }}
265+
run: |
266+
py.test -n"$PYTEST_PARALLEL" -vvv --target "$WAREHOUSE" \
267+
--junit-xml=test-results.xml \
268+
--html="detailed_report_${WAREHOUSE}_dbt_${DBT_VERSION}.html" \
269+
--self-contained-html --clear-on-end $FUSION_RUNNER_FLAG
248270
249271
- name: Upload test results
250272
if: always()
251-
uses: pmeier/pytest-results-action@v0.8.0
273+
uses: pmeier/pytest-results-action@0841ca7226ab155943837380769373a5dd14d7ed # v0.8.0
252274
with:
253275
path: ${{ env.TESTS_DIR }}/tests/test-results.xml
254276
summary: true
@@ -269,6 +291,7 @@ jobs:
269291
working-directory: ${{ env.TESTS_DIR }}
270292
continue-on-error: true
271293
run: |
272-
${{ (inputs.dbt-version == 'fusion' && '~/.local/bin/dbt') || 'dbt' }} run-operation elementary_tests.drop_test_schemas \
294+
if [ "$DBT_VERSION" = "fusion" ]; then DBT_BIN="$HOME/.local/bin/dbt"; else DBT_BIN="dbt"; fi
295+
"$DBT_BIN" run-operation elementary_tests.drop_test_schemas \
273296
--project-dir dbt_project \
274-
-t "${{ inputs.warehouse-type }}"
297+
-t "$WAREHOUSE"

0 commit comments

Comments (0)