diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eb364e05..a48bad5f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -93,12 +93,18 @@ Tox will take care of installing the dependencies for each environment, so you d tox -e integration_snowflake # For the Snowflake tests tox -e integration_databricks # For the Databricks tests tox -e integration_bigquery # For the BigQuery tests + tox -e integration_redshift # For the Redshift tests ``` The Spark tests require installing the [ODBC driver](https://www.databricks.com/spark/odbc-drivers-download). On a Mac, DBT_ENV_SPARK_DRIVER_PATH should be set to `/Library/simba/spark/lib/libsparkodbc_sbu.dylib`. Spark tests have not yet been added to the integration tests. +The Redshift tests require your AWS credentials configured in the current environment (either as environment variables or in your credentials +file - see [Configure the AWS cli](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html)). They are currently configured with IAM +authorisation, so your principal will require the redshift:GetClusterCredentials permission to retrieve the password for the specified Redshift +database user. + If you don't have access to a particular database type, this isn't a problem. Test on the one you do have, and let us know in the PR. 
#### SQLFluff diff --git a/README.md b/README.md index 3d60ba20..26ce21eb 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ The package currently supports - Postgres :white_check_mark: - SQL Server :white_check_mark: - Trino :white_check_mark: +- Redshift :white_check_mark: Models included: diff --git a/integration_test_project/example-env.sh b/integration_test_project/example-env.sh index 47cb0d67..859cb0fc 100755 --- a/integration_test_project/example-env.sh +++ b/integration_test_project/example-env.sh @@ -16,6 +16,10 @@ export DBT_ENV_SECRET_DATABRICKS_TOKEN= export DBT_ENV_SECRET_GCP_PROJECT= export DBT_ENV_SPARK_DRIVER_PATH= # /Library/simba/spark/lib/libsparkodbc_sbu.dylib on a Mac export DBT_ENV_SPARK_ENDPOINT= # The endpoint ID from the Databricks HTTP path +export DBT_ENV_SECRET_REDSHIFT_HOST= +export DBT_ENV_SECRET_REDSHIFT_CLUSTER_ID= +export DBT_ENV_SECRET_REDSHIFT_DB= +export DBT_ENV_SECRET_REDSHIFT_USER= # dbt environment variables, change these export DBT_VERSION="1_5_0" diff --git a/integration_test_project/profiles.yml b/integration_test_project/profiles.yml index 84d85ddc..3e7cf2ab 100644 --- a/integration_test_project/profiles.yml +++ b/integration_test_project/profiles.yml @@ -63,6 +63,17 @@ dbt_artifacts: trust_cert: True Encrypt: False user: dbt + password: "123" + redshift: + type: redshift + method: iam + threads: 8 + host: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_HOST') }}" + port: 5439 + dbname: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_DB') }}" + user: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_USER') }}" + schema: dbt_artifacts_test_commit_{{ env_var('DBT_VERSION', '') }}_{{ env_var('GITHUB_SHA_OVERRIDE', '') if env_var('GITHUB_SHA_OVERRIDE', '') else env_var('GITHUB_SHA') }} + cluster_id: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_CLUSTER_ID') }}" password: "123Administrator" trino: type: trino diff --git a/macros/_macros.yml b/macros/_macros.yml index d13edb00..0fc89ee3 100644 --- a/macros/_macros.yml +++ b/macros/_macros.yml @@ -57,6 +57,10 @@ macros: description: 
| Dependent on the adapter type, returns the native type for storing JSON. + - name: type_string + description: | + Dependent on the adapter type, returns the native type for storing a string. + ## MIGRATION ## - name: migrate_from_v0_to_v1 description: | diff --git a/macros/database_specific_helpers/type_helpers.sql b/macros/database_specific_helpers/type_helpers.sql index cbc19e5f..e553377e 100644 --- a/macros/database_specific_helpers/type_helpers.sql +++ b/macros/database_specific_helpers/type_helpers.sql @@ -26,10 +26,14 @@ json {% endmacro %} +{% macro redshift__type_json() %} + super +{% endmacro %} + {#- ARRAY -#} {% macro type_array() %} - {{ return(adapter.dispatch('type_array', 'dbt_artifacts')()) }} + {{ return(adapter.dispatch('type_array', 'dbt_artifacts')()) }} {% endmacro %} {% macro default__type_array() %} @@ -44,6 +48,22 @@ array {% endmacro %} +{% macro redshift__type_array() %} + super +{% endmacro %} + +{% macro type_string() %} + {{ return(adapter.dispatch('type_string', 'dbt_artifacts')()) }} +{% endmacro %} + +{% macro default__type_string() %} + {{ return(api.Column.translate_type("string")) }} +{% endmacro %} + +{% macro redshift__type_string() %} + varchar(max) +{% endmacro %} + {% macro trino__type_array() %} array(varchar) {% endmacro %} diff --git a/tox.ini b/tox.ini index 92803151..d04cc4b2 100644 --- a/tox.ini +++ b/tox.ini @@ -68,6 +68,9 @@ profiles_dir = integration_test_project [testenv] passenv = + AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY + AWS_SESSION_TOKEN DBT_PROFILES_DIR GITHUB_SHA_OVERRIDE GITHUB_SHA @@ -84,6 +87,10 @@ passenv = DBT_ENV_SECRET_GCP_PROJECT DBT_ENV_SPARK_DRIVER_PATH DBT_ENV_SPARK_ENDPOINT + DBT_ENV_SECRET_REDSHIFT_HOST + DBT_ENV_SECRET_REDSHIFT_CLUSTER_ID + DBT_ENV_SECRET_REDSHIFT_DB + DBT_ENV_SECRET_REDSHIFT_USER GOOGLE_APPLICATION_CREDENTIALS DBT_CLOUD_PROJECT_ID DBT_CLOUD_JOB_ID @@ -304,6 +311,15 @@ commands = dbt deps dbt build --target bigquery --vars '"my_var": "my value"' +# Redshift integration test 
+[testenv:integration_redshift] +changedir = integration_test_project +deps = dbt-redshift~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target redshift --vars '"my_var": "my value"' + [testenv:integration_bigquery_1_9_0] changedir = integration_test_project deps = dbt-bigquery~=1.9.0