foreign key match percent
foreign key match percent checks
Description
Column-level check that ensures that the percentage of values in the tested column that match values in a column of another table does not fall below the minimum accepted percentage.
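As an illustration, for a hypothetical fact_sales table whose customer_id column should reference dim_customer.customer_id, the match percentage measured by this check corresponds to a query of the following shape (a simplified, dialect-neutral sketch; the exact SQL generated for each supported database is shown further below):

SELECT
    100.0 * SUM(
        CASE
            -- a non-null value with no match in the dictionary table counts as a mismatch
            WHEN dim_customer.customer_id IS NULL AND fact_sales.customer_id IS NOT NULL
                THEN 0
            ELSE 1
        END
    ) / COUNT(*) AS foreign_key_match_percent
FROM fact_sales
LEFT OUTER JOIN dim_customer
    ON fact_sales.customer_id = dim_customer.customer_id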
profile foreign key match percent
Check description
Verifies that the percentage of values in a column that match values in a column of another table does not fall below the minimum accepted percentage.
Check name | Check type | Time scale | Sensor definition | Quality rule |
---|---|---|---|---|
profile_foreign_key_match_percent | profiling | | foreign_key_match_percent | min_percent |
Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command.
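Assuming the check enable command accepts the same -c (connection) and -ch (check) parameters as the check run command shown below, enabling this check would look like this:

dqo> check enable -c=connection_name -ch=profile_foreign_key_match_percent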
To run this check, provide the check name in the check run command. The run can also be narrowed to a specific connection, table, or column by adding the connection name, table name, and column name, as in the commands below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=profile_foreign_key_match_percent
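The connection, table, and column filters shown above can also be applied incrementally; for example:

dqo> check run -ch=profile_foreign_key_match_percent
dqo> check run -c=connection_name -ch=profile_foreign_key_match_percent
dqo> check run -c=connection_name -t=table_name -ch=profile_foreign_key_match_percent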
profiling_checks:
integrity:
profile_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
profiling_checks:
integrity:
profile_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
Configuration with data grouping
Click to see more
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
profiling_checks:
integrity:
profile_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY level_1, level_2
daily foreign key match percent
Check description
Verifies that the percentage of values in a column that match values in a column of another table does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.
Check name | Check type | Time scale | Sensor definition | Quality rule |
---|---|---|---|---|
daily_foreign_key_match_percent | recurring | daily | foreign_key_match_percent | min_percent |
Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command.
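Assuming the check enable command accepts the same -c (connection) and -ch (check) parameters as the check run command shown below, enabling this check would look like this:

dqo> check enable -c=connection_name -ch=daily_foreign_key_match_percent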
To run this check, provide the check name in the check run command. The run can also be narrowed to a specific connection, table, or column by adding the connection name, table name, and column name, as in the commands below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=daily_foreign_key_match_percent
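The connection, table, and column filters shown above can also be applied incrementally; for example:

dqo> check run -ch=daily_foreign_key_match_percent
dqo> check run -c=connection_name -ch=daily_foreign_key_match_percent
dqo> check run -c=connection_name -t=table_name -ch=daily_foreign_key_match_percent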
recurring_checks:
daily:
integrity:
daily_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
recurring_checks:
daily:
integrity:
daily_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
Configuration with data grouping
Click to see more
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
recurring_checks:
daily:
integrity:
daily_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY level_1, level_2
monthly foreign key match percent
Check description
Verifies that the percentage of values in a column that match values in a column of another table does not fall below the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.
Check name | Check type | Time scale | Sensor definition | Quality rule |
---|---|---|---|---|
monthly_foreign_key_match_percent | recurring | monthly | foreign_key_match_percent | min_percent |
Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command.
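Assuming the check enable command accepts the same -c (connection) and -ch (check) parameters as the check run command shown below, enabling this check would look like this:

dqo> check enable -c=connection_name -ch=monthly_foreign_key_match_percent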
To run this check, provide the check name in the check run command. The run can also be narrowed to a specific connection, table, or column by adding the connection name, table name, and column name, as in the commands below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=monthly_foreign_key_match_percent
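The connection, table, and column filters shown above can also be applied incrementally; for example:

dqo> check run -ch=monthly_foreign_key_match_percent
dqo> check run -c=connection_name -ch=monthly_foreign_key_match_percent
dqo> check run -c=connection_name -t=table_name -ch=monthly_foreign_key_match_percent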
recurring_checks:
monthly:
integrity:
monthly_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
recurring_checks:
monthly:
integrity:
monthly_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
Configuration with data grouping
Click to see more
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
recurring_checks:
monthly:
integrity:
monthly_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY analyzed_table.[country], analyzed_table.[state]
daily partition foreign key match percent
Check description
Verifies that the percentage of values in a column that match values in another table's column does not fall below the minimum accepted percentage. Creates a separate data quality check (and an alert) for each daily partition.
Check name | Check type | Time scale | Sensor definition | Quality rule |
---|---|---|---|---|
daily_partition_foreign_key_match_percent | partitioned | daily | foreign_key_match_percent | min_percent |
Enable check (Shell)
To enable this check, provide the connection name and check name to the check enable command.
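For example, a minimal sketch assuming that check enable accepts the same -c and -ch options as the check run command shown below (the option names are an assumption; verify them against your DQO version):
dqo> check enable -c=connection_name -ch=daily_partition_foreign_key_match_percent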
To run this check, provide the check name to the check run command. The check can also be scoped to a specific connection, table, or column by adding the connection name, table name, and column name, as in the command below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=daily_partition_foreign_key_match_percent
partitioned_checks:
daily:
integrity:
daily_partition_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
partitioned_checks:
daily:
integrity:
daily_partition_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
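The rendered SQL examples below contain empty column identifiers (for example analyzed_table."" or analyzed_table.[]) because this sample configuration does not specify the date column used for daily partitioning. A minimal sketch of how such a column could be declared, assuming the table schema exposes a partition_by_column field under timestamp_columns (the field name is an assumption; verify it against the TableYaml schema for your DQO version):
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
    partition_by_column: col_event_timestamp  # assumed field name; the date column that defines the daily partitions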
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(analyzed_table.`` AS DATE) AS time_period,
TIMESTAMP(CAST(analyzed_table.`` AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(analyzed_table."" AS date) AS time_period,
CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(analyzed_table."" AS date) AS time_period,
CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(analyzed_table."" AS date) AS time_period,
TO_TIMESTAMP(CAST(analyzed_table."" AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
CAST(analyzed_table.[] AS date) AS time_period,
CAST((CAST(analyzed_table.[] AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY CAST(analyzed_table.[] AS date), CAST(analyzed_table.[] AS date)
ORDER BY CAST(analyzed_table.[] AS date)
Configuration with data grouping
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
partitioned_checks:
daily:
integrity:
daily_partition_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
CAST(analyzed_table.`` AS DATE) AS time_period,
TIMESTAMP(CAST(analyzed_table.`` AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(analyzed_table."" AS date) AS time_period,
CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(analyzed_table."" AS date) AS time_period,
CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(analyzed_table."" AS date) AS time_period,
TO_TIMESTAMP(CAST(analyzed_table."" AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
CAST(analyzed_table.[] AS date) AS time_period,
CAST((CAST(analyzed_table.[] AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[] AS date), CAST(analyzed_table.[] AS date)
ORDER BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[] AS date)
monthly partition foreign key match percent
Check description
Verifies that the percentage of values in a column that match values in another table's column does not fall below the minimum accepted percentage. Creates a separate data quality check (and an alert) for each monthly partition.
Check name | Check type | Time scale | Sensor definition | Quality rule |
---|---|---|---|---|
monthly_partition_foreign_key_match_percent | partitioned | monthly | foreign_key_match_percent | min_percent |
Enable check (Shell)
To enable this check, provide the connection name and check name to the check enable command.
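For example, a minimal sketch assuming that check enable takes the same -c and -ch options as check run (an assumption, not confirmed here):
dqo> check enable -c=connection_name -ch=monthly_partition_foreign_key_match_percent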
To run this check, provide the check name to the check run command. The check can also be scoped to a specific connection, table, or column by adding the connection name, table name, and column name, as in the command below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=monthly_partition_foreign_key_match_percent
partitioned_checks:
monthly:
integrity:
monthly_partition_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
partitioned_checks:
monthly:
integrity:
monthly_partition_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC(CAST(analyzed_table.`` AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(analyzed_table.`` AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1) AS time_period,
CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[]), 0)
ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1)
Configuration with data grouping
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
partitioned_checks:
monthly:
integrity:
monthly_partition_foreign_key_match_percent:
parameters:
foreign_table: dim_customer
foreign_column: customer_id
warning:
min_percent: 100.0
error:
min_percent: 99.0
fatal:
min_percent: 95.0
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_project_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.`customer_id` IS NULL AND analyzed_table.`target_column` IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_TRUNC(CAST(analyzed_table.`` AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(analyzed_table.`` AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
LEFT OUTER JOIN `your-google-project-id`.`<target_schema>`.`dim_customer` AS foreign_table
ON analyzed_table.`target_column` = foreign_table.`customer_id`
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_postgresql_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_redshift_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
LEFT OUTER JOIN "your_snowflake_database"."<target_schema>"."dim_customer" AS foreign_table
ON analyzed_table."target_column" = foreign_table."customer_id"
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
{%- macro render_foreign_table(foreign_table) -%}
{%- if foreign_table.find(".") < 0 -%}
{{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(foreign_table) -}}
{%- else -%}
{{ foreign_table }}
{%- endif -%}
{%- endmacro -%}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
LEFT OUTER JOIN {{ render_foreign_table(parameters.foreign_table) }} AS foreign_table
ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }}
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
100.0 * SUM(
CASE
WHEN foreign_table.[customer_id] IS NULL AND analyzed_table.[target_column] IS NOT NULL
THEN 0
ELSE 1
END
) / COUNT(*) AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1) AS time_period,
CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
LEFT OUTER JOIN [your_sql_server_database].[<target_schema>].[dim_customer] AS foreign_table
ON analyzed_table.[target_column] = foreign_table.[customer_id]
GROUP BY analyzed_table.[country], analyzed_table.[state], DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[]), 0)
ORDER BY analyzed_table.[country], analyzed_table.[state], DATEFROMPARTS(YEAR(CAST(analyzed_table.[] AS date)), MONTH(CAST(analyzed_table.[] AS date)), 1)