Merge pull request #3204 from fishtown-analytics/updates/tox
dev env clean up and improvements
Kyle Wigley authored Mar 29, 2021
2 parents 0335960 + c04d1e9 commit ce30dfa
Showing 22 changed files with 322 additions and 1,991 deletions.
181 changes: 43 additions & 138 deletions .circleci/config.yml
@@ -2,12 +2,19 @@ version: 2.1
jobs:
unit:
docker: &test_only
- image: fishtownanalytics/test-container:9
- image: fishtownanalytics/test-container:11
environment:
DBT_INVOCATION_ENV: circle
DOCKER_TEST_DATABASE_HOST: "database"
TOX_PARALLEL_NO_SPINNER: 1
steps:
- checkout
- run: tox -e flake8,mypy,unit-py36,unit-py38
- run: tox -p -e py36,py37,py38
lint:
docker: *test_only
steps:
- checkout
- run: tox -e mypy,flake8 -- -v
build-wheels:
docker: *test_only
steps:
@@ -19,7 +26,7 @@ jobs:
export PYTHON_BIN="${PYTHON_ENV}/bin/python"
$PYTHON_BIN -m pip install -U pip setuptools
$PYTHON_BIN -m pip install -r requirements.txt
$PYTHON_BIN -m pip install -r dev_requirements.txt
$PYTHON_BIN -m pip install -r dev-requirements.txt
/bin/bash ./scripts/build-wheels.sh
$PYTHON_BIN ./scripts/collect-dbt-contexts.py > ./dist/context_metadata.json
$PYTHON_BIN ./scripts/collect-artifact-schema.py > ./dist/artifact_schemas.json
@@ -28,20 +35,22 @@
- store_artifacts:
path: ./dist
destination: dist
integration-postgres-py36:
docker: &test_and_postgres
- image: fishtownanalytics/test-container:9
integration-postgres:
docker:
- image: fishtownanalytics/test-container:11
environment:
DBT_INVOCATION_ENV: circle
DOCKER_TEST_DATABASE_HOST: "database"
TOX_PARALLEL_NO_SPINNER: 1
- image: postgres
name: database
environment: &pgenv
environment:
POSTGRES_USER: "root"
POSTGRES_PASSWORD: "password"
POSTGRES_DB: "dbt"
steps:
- checkout
- run: &setupdb
- run:
name: Setup postgres
command: bash test/setup_db.sh
environment:
@@ -50,169 +59,65 @@ jobs:
PGPASSWORD: password
PGDATABASE: postgres
- run:
name: Run tests
command: tox -e integration-postgres-py36
- store_artifacts:
path: ./logs
integration-snowflake-py36:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-snowflake-py36
no_output_timeout: 1h
- store_artifacts:
path: ./logs
integration-redshift-py36:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-redshift-py36
- store_artifacts:
path: ./logs
integration-bigquery-py36:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-bigquery-py36
- store_artifacts:
path: ./logs
integration-postgres-py38:
docker: *test_and_postgres
steps:
- checkout
- run: *setupdb
- run:
name: Run tests
command: tox -e integration-postgres-py38
- store_artifacts:
path: ./logs
integration-snowflake-py38:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-snowflake-py38
no_output_timeout: 1h
- store_artifacts:
path: ./logs
integration-redshift-py38:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-redshift-py38
- store_artifacts:
path: ./logs
integration-bigquery-py38:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-bigquery-py38
- store_artifacts:
path: ./logs

integration-postgres-py39:
docker: *test_and_postgres
steps:
- checkout
- run: *setupdb
- run:
name: Run tests
command: tox -e integration-postgres-py39
name: Postgres integration tests
command: tox -p -e py36-postgres,py38-postgres -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
integration-snowflake-py39:
integration-snowflake:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-snowflake-py39
no_output_timeout: 1h
name: Snowflake integration tests
command: tox -p -e py36-snowflake,py38-snowflake -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
integration-redshift-py39:
integration-redshift:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-redshift-py39
name: Redshift integration tests
command: tox -p -e py36-redshift,py38-redshift -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs
integration-bigquery-py39:
integration-bigquery:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-bigquery-py39
name: Bigquery integration test
command: tox -p -e py36-bigquery,py38-bigquery -- -v -n4
no_output_timeout: 30m
- store_artifacts:
path: ./logs

workflows:
version: 2
test-everything:
jobs:
- lint
- unit
- integration-postgres-py36:
- integration-postgres:
requires:
- unit
- integration-redshift-py36:
requires:
- integration-postgres-py36
- integration-bigquery-py36:
requires:
- integration-postgres-py36
- integration-snowflake-py36:
requires:
- integration-postgres-py36
- integration-postgres-py38:
- integration-redshift:
requires:
- unit
- integration-redshift-py38:
requires:
- integration-postgres-py38
- integration-bigquery-py38:
requires:
- integration-postgres-py38
- integration-snowflake-py38:
requires:
- integration-postgres-py38
- integration-postgres-py39:
- integration-bigquery:
requires:
- unit
- integration-redshift-py39:
requires:
- integration-postgres-py39
- integration-bigquery-py39:
- integration-snowflake:
requires:
- integration-postgres-py39
# - integration-snowflake-py39:
# requires:
# - integration-postgres-py39
- unit
- build-wheels:
requires:
- lint
- unit
- integration-postgres-py36
- integration-redshift-py36
- integration-bigquery-py36
- integration-snowflake-py36
- integration-postgres-py38
- integration-redshift-py38
- integration-bigquery-py38
- integration-snowflake-py38
- integration-postgres-py39
- integration-redshift-py39
- integration-bigquery-py39
# - integration-snowflake-py39
- integration-postgres
- integration-redshift
- integration-bigquery
- integration-snowflake
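
For orientation, the new commands above rely on tox's parallel mode (`tox -p`) and factor-conditional environments, with everything after `--` forwarded to pytest (here `-v -n4`, i.e. verbose output and four pytest-xdist workers). The updated tox.ini is one of the 22 changed files but is not shown in this excerpt, so the sketch below is only a hypothetical reconstruction: the environment names match the commands in the CircleCI config, while the pytest markers, dependency files, and passenv patterns are assumptions.

```ini
# Hypothetical tox.ini sketch -- env names mirror the CircleCI commands above;
# markers, deps files, and passenv values are illustrative guesses, not the real file.
[tox]
skipsdist = True
envlist = py36,py37,py38,flake8,mypy

[testenv:flake8]
description = flake8 code checks
basepython = python3.8
skip_install = true
commands = flake8 core/dbt
deps = -rdev-requirements.txt

[testenv:mypy]
description = mypy static type checking
basepython = python3.8
skip_install = true
commands = mypy core/dbt
deps =
    -rrequirements.txt
    -rdev-requirements.txt

[testenv:{py36,py37,py38,py39}]
description = unit testing
skip_install = true
passenv = DBT_* PYTEST_ADDOPTS
commands = {envpython} -m pytest {posargs} test/unit
deps =
    -rrequirements.txt
    -rdev-requirements.txt

[testenv:{py36,py37,py38,py39}-{postgres,redshift,snowflake,bigquery}]
description = adapter integration testing
skip_install = true
passenv = DBT_* POSTGRES_TEST_* REDSHIFT_TEST_* SNOWFLAKE_TEST_* BIGQUERY_TEST_*
commands =
    postgres: {envpython} -m pytest {posargs} -m profile_postgres test/integration
    redshift: {envpython} -m pytest {posargs} -m profile_redshift test/integration
    snowflake: {envpython} -m pytest {posargs} -m profile_snowflake test/integration
    bigquery: {envpython} -m pytest {posargs} -m profile_bigquery test/integration
deps =
    -rrequirements.txt
    -rdev-requirements.txt
```

Under a layout like that, a contributor could reproduce a CI run locally with, for example, `tox -p -e py38-postgres -- -v -n4` after starting the same Postgres container the integration-postgres job uses.
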
4 changes: 3 additions & 1 deletion CHANGELOG.md
@@ -19,7 +19,9 @@
### Under the hood
- Add dependabot configuration for alerting maintainers about keeping dependencies up to date and secure. ([#3061](https://github.com/fishtown-analytics/dbt/issues/3061), [#3062](https://github.com/fishtown-analytics/dbt/pull/3062))
- Update script to collect and write json schema for dbt artifacts ([#2870](https://github.com/fishtown-analytics/dbt/issues/2870), [#3065](https://github.com/fishtown-analytics/dbt/pull/3065))
- Bump snowflake-connector-python and related dependencies, support Python 3.9 ([#2985](https://github.com/fishtown-analytics/dbt/issues/2985), [#3148](https://github.com/fishtown-analytics/dbt/pull/3148))
- Bump `snowflake-connector-python` and related dependencies, support Python 3.9 ([#2985](https://github.com/fishtown-analytics/dbt/issues/2985), [#3148](https://github.com/fishtown-analytics/dbt/pull/3148))
- General development environment clean up and improve experience running tests locally ([#3194](https://github.com/fishtown-analytics/dbt/issues/3194), [#3204](https://github.com/fishtown-analytics/dbt/pull/3204))

Contributors:
- [@yu-iskw](https://github.com/yu-iskw) ([#2928](https://github.com/fishtown-analytics/dbt/pull/2928))
