change distinct percent since 30 days
change distinct percent since 30 days checks
Description
Column-level check that verifies that the percentage of distinct values in a monitored column has not changed by more than an accepted rate since the readout captured 30 days earlier.
profile change distinct percent since 30 days
Check description
Verifies that the distinct percent (the percentage of distinct values) in a monitored column has not changed by more than an accepted rate since the last readout from 30 days ago.
Check name | Check type | Time scale | Quality dimension | Sensor definition | Quality rule |
---|---|---|---|---|---|
profile_change_distinct_percent_since_30_days | profiling | | Consistency | distinct_percent | change_percent_30_days |
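The quality rule attached to these checks, change_percent_30_days, compares the current distinct_percent readout with a readout captured about 30 days earlier and raises the warning, error, or fatal severity whose max_percent bound is exceeded. The following is a minimal sketch of the assumed rule semantics, not DQOps source code; it assumes the rule measures the relative change between the two readouts, and that exact_day: false lets the rule fall back to the most recent readout that is at least 30 days old.

def change_exceeds_bound(current: float, previous: float, max_percent: float) -> bool:
    """Return True when the relative change between two sensor readouts exceeds max_percent (assumed semantics)."""
    if previous == 0.0:
        # Assumption: any move away from a zero baseline counts as a change.
        return current != 0.0
    change_percent = abs(current - previous) / abs(previous) * 100.0
    return change_percent > max_percent

# Example: a distinct percent readout that moved from 40.0 to 44.0 is a 10% relative
# change, so a severity level configured with max_percent: 5.0 would be raised.
assert change_exceeds_bound(44.0, 40.0, max_percent=5.0)
assert not change_exceeds_bound(41.0, 40.0, max_percent=5.0)  # a 2.5% change is accepted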
Enable check (Shell)
To enable this check, provide the connection name and the check name to the check enable command.
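For example, assuming check enable accepts the same -c and -ch parameters as the check run commands below:

dqo> check enable -c=connection_name -ch=profile_change_distinct_percent_since_30_days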
To run this check, provide the check name to the check run command. The run can also be limited to a specific connection, table, or column by adding the -c, -t, and -col parameters, as in the commands below.
dqo> check run -c=connection_name -t=schema_name.table_name -ch=profile_change_distinct_percent_since_30_days
dqo> check run -c=connection_name -t=schema_name.table_name -col=column_name -ch=profile_change_distinct_percent_since_30_days
profiling_checks:
  uniqueness:
    profile_change_distinct_percent_since_30_days:
      warning:
        max_percent: 5.0
        exact_day: false
      error:
        max_percent: 5.0
        exact_day: false
      fatal:
        max_percent: 5.0
        exact_day: false
The sample schema_name.table_name.dqotable.yaml file with the check configured is shown below.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  columns:
    target_column:
      profiling_checks:
        uniqueness:
          profile_change_distinct_percent_since_30_days:
            warning:
              max_percent: 5.0
              exact_day: false
            error:
              max_percent: 5.0
              exact_day: false
            fatal:
              max_percent: 5.0
              exact_day: false
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
The sections below show the Jinja2 template for the distinct_percent sensor and the SQL query it renders for each supported database engine.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS time_period,
CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
The Configuration with data grouping section below shows additional examples that configure this data quality check to use data grouping (GROUP BY).
Configuration with data grouping
Sample configuration with data grouping enabled (YAML)
The sample below shows how to configure the data grouping and how it affects the generated SQL query.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  columns:
    target_column:
      profiling_checks:
        uniqueness:
          profile_change_distinct_percent_since_30_days:
            warning:
              max_percent: 5.0
              exact_day: false
            error:
              max_percent: 5.0
              exact_day: false
            fatal:
              max_percent: 5.0
              exact_day: false
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
The sections below show the Jinja2 template for the distinct_percent sensor and the SQL query it renders for each supported database engine.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table.grouping_level_1,
analyzed_table.grouping_level_2,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
original_table."country" AS grouping_level_1,
original_table."state" AS grouping_level_2,
TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS time_period,
CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY analyzed_table.[country], analyzed_table.[state]
daily change distinct percent since 30 days
Check description
Verifies that the distinct percent (the percentage of distinct values) in a monitored column has not changed by more than an accepted rate since the last readout from 30 days ago.
Check name | Check type | Time scale | Quality dimension | Sensor definition | Quality rule |
---|---|---|---|---|---|
daily_change_distinct_percent_since_30_days | monitoring | daily | Consistency | distinct_percent | change_percent_30_days |
Enable check (Shell)
To enable this check, provide the connection name and the check name to the check enable command.
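For example, assuming check enable accepts the same -c and -ch parameters as the check run commands below:

dqo> check enable -c=connection_name -ch=daily_change_distinct_percent_since_30_days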
To run this check, provide the check name to the check run command. The run can also be limited to a specific connection, table, or column by adding the -c, -t, and -col parameters, as in the commands below.
dqo> check run -c=connection_name -t=schema_name.table_name -ch=daily_change_distinct_percent_since_30_days
dqo> check run -c=connection_name -t=schema_name.table_name -col=column_name -ch=daily_change_distinct_percent_since_30_days
monitoring_checks:
  daily:
    uniqueness:
      daily_change_distinct_percent_since_30_days:
        warning:
          max_percent: 5.0
          exact_day: false
        error:
          max_percent: 5.0
          exact_day: false
        fatal:
          max_percent: 5.0
          exact_day: false
The sample schema_name.table_name.dqotable.yaml file with the check configured is shown below.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  columns:
    target_column:
      monitoring_checks:
        daily:
          uniqueness:
            daily_change_distinct_percent_since_30_days:
              warning:
                max_percent: 5.0
                exact_day: false
              error:
                max_percent: 5.0
                exact_day: false
              fatal:
                max_percent: 5.0
                exact_day: false
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
The sections below show the Jinja2 template for the distinct_percent sensor and the SQL query it renders for each supported database engine.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS time_period,
CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
The Configuration with data grouping section below shows additional examples that configure this data quality check to use data grouping (GROUP BY).
Configuration with data grouping
Sample configuration with data grouping enabled (YAML)
The sample below shows how to configure the data grouping and how it affects the generated SQL query.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  columns:
    target_column:
      monitoring_checks:
        daily:
          uniqueness:
            daily_change_distinct_percent_since_30_days:
              warning:
                max_percent: 5.0
                exact_day: false
              error:
                max_percent: 5.0
                exact_day: false
              fatal:
                max_percent: 5.0
                exact_day: false
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
The sections below show the Jinja2 template for the distinct_percent sensor and the SQL query it renders for each supported database engine.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table.grouping_level_1,
analyzed_table.grouping_level_2,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
original_table."country" AS grouping_level_1,
original_table."state" AS grouping_level_2,
TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS time_period,
CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(LOCALTIMESTAMP AS date) AS time_period,
CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY analyzed_table.[country], analyzed_table.[state]
monthly change distinct percent since 30 days
Check description
Verifies that the distinct percent (the percentage of distinct values) in a monitored column has not changed by more than an accepted rate since the last readout from 30 days ago.
Check name | Check type | Time scale | Quality dimension | Sensor definition | Quality rule |
---|---|---|---|---|---|
monthly_change_distinct_percent_since_30_days | monitoring | monthly | Consistency | distinct_percent | change_percent_30_days |
Enable check (Shell)
To enable this check, provide the connection name and the check name to the check enable command.
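For example, assuming check enable accepts the same -c and -ch parameters as the check run commands below:

dqo> check enable -c=connection_name -ch=monthly_change_distinct_percent_since_30_days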
To run this check, provide the check name to the check run command. The run can also be limited to a specific connection, table, or column by adding the -c, -t, and -col parameters, as in the commands below.
dqo> check run -c=connection_name -t=schema_name.table_name -ch=monthly_change_distinct_percent_since_30_days
dqo> check run -c=connection_name -t=schema_name.table_name -col=column_name -ch=monthly_change_distinct_percent_since_30_days
monitoring_checks:
  monthly:
    uniqueness:
      monthly_change_distinct_percent_since_30_days:
        warning:
          max_percent: 5.0
          exact_day: false
        error:
          max_percent: 5.0
          exact_day: false
        fatal:
          max_percent: 5.0
          exact_day: false
The sample schema_name.table_name.dqotable.yaml file with the check configured is shown below.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  columns:
    target_column:
      monitoring_checks:
        monthly:
          uniqueness:
            monthly_change_distinct_percent_since_30_days:
              warning:
                max_percent: 5.0
                exact_day: false
              error:
                max_percent: 5.0
                exact_day: false
              fatal:
                max_percent: 5.0
                exact_day: false
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
The sections below show the Jinja2 template for the distinct_percent sensor and the SQL query it renders for each supported database engine.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS time_period,
CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
The Configuration with data grouping section below shows additional examples that configure this data quality check to use data grouping (GROUP BY).
Configuration with data grouping
Sample configuration with data grouping enabled (YAML)
The sample below shows how to configure the data grouping and how it affects the generated SQL query.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  columns:
    target_column:
      monitoring_checks:
        monthly:
          uniqueness:
            monthly_change_distinct_percent_since_30_days:
              warning:
                max_percent: 5.0
                exact_day: false
              error:
                max_percent: 5.0
                exact_day: false
              fatal:
                max_percent: 5.0
                exact_day: false
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
The sections below show the Jinja2 template for the distinct_percent sensor and the SQL query it renders for each supported database engine.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table.grouping_level_1,
    analyzed_table.grouping_level_2,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
original_table."country" AS grouping_level_1,
original_table."state" AS grouping_level_2,
TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS time_period,
CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY grouping_level_1, grouping_level_2
daily partition change distinct percent since 30 days
Check description
Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from last month.
Check name | Check type | Time scale | Quality dimension | Sensor definition | Quality rule |
---|---|---|---|---|---|
daily_partition_change_distinct_percent_since_30_days | partitioned | daily | Consistency | distinct_percent | change_percent_30_days |
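The distinct_percent sensor measures the share of distinct values among the non-null values of the column, and returns 100.0 for an empty (or all-null) column so that the rule still receives a readout. A minimal Python sketch of the formula implemented by the SQL templates below (the function name and data are illustrative; nulls are skipped, mirroring how SQL COUNT ignores them):
from typing import Iterable, Optional

def distinct_percent(values: Iterable[Optional[str]]) -> float:
    """Share of distinct values among non-null values, as a percentage."""
    non_null = [v for v in values if v is not None]
    if not non_null:            # empty input: the sensor returns 100.0
        return 100.0
    return 100.0 * len(set(non_null)) / len(non_null)

# 8 distinct values among 10 non-null rows -> 80.0
print(distinct_percent(["a", "b", "c", "d", "e", "f", "g", "h", "a", "b"]))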
Enable check (Shell)
To enable this check, provide the connection name and the check name in the check enable command.
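For example (assuming the check enable command accepts the same -c and -ch parameters as the check run commands shown below):
dqo> check enable -c=connection_name -ch=daily_partition_change_distinct_percent_since_30_days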
To run this check, provide the check name in the check run command. It is also possible to run this check for a specific connection and table; to do so, add the connection name and the table name to the command as shown below. To analyze a single column, additionally provide the column name.
dqo> check run -c=connection_name -t=schema_name.table_name -ch=daily_partition_change_distinct_percent_since_30_days
dqo> check run -c=connection_name -t=schema_name.table_name -col=column_name -ch=daily_partition_change_distinct_percent_since_30_days
partitioned_checks:
daily:
uniqueness:
daily_partition_change_distinct_percent_since_30_days:
warning:
max_percent: 5.0
exact_day: false
error:
max_percent: 5.0
exact_day: false
fatal:
max_percent: 5.0
exact_day: false
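A minimal Python sketch of how the change_percent_30_days rule could evaluate one of these thresholds (warning, error, and fatal each carry their own max_percent), assuming that exact_day: true requires a readout from exactly 30 days earlier while false accepts the most recent readout that is at least 30 days old. This reading of exact_day, and all names below, are illustrative assumptions, not the DQOps implementation:
import datetime
from typing import Optional

def pick_reference(readouts: dict[datetime.date, float],
                   today: datetime.date, exact_day: bool) -> Optional[float]:
    """Pick the readout from ~30 days ago (assumed exact_day semantics)."""
    target = today - datetime.timedelta(days=30)
    if exact_day:
        return readouts.get(target)
    eligible = [d for d in readouts if d <= target]
    return readouts[max(eligible)] if eligible else None

def within_max_percent(current: float, reference: float, max_percent: float) -> bool:
    """True when the relative change stays within the threshold."""
    return 100.0 * abs(current - reference) / abs(reference) <= max_percent

today = datetime.date(2024, 5, 1)
history = {datetime.date(2024, 3, 28): 80.0}           # a readout 34 days back
ref = pick_reference(history, today, exact_day=False)  # falls back to 2024-03-28
print(within_max_percent(83.0, ref, max_percent=5.0))  # 3.75% change -> True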
The sample schema_name.table_name.dqotable.yaml file with the check configured is shown below.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
partition_by_column: date_column
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
partitioned_checks:
daily:
uniqueness:
daily_partition_change_distinct_percent_since_30_days:
warning:
max_percent: 5.0
exact_day: false
error:
max_percent: 5.0
exact_day: false
fatal:
max_percent: 5.0
exact_day: false
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
date_column:
labels:
- "date or datetime column used as a daily or monthly partitioning key, dates\
\ (and times) are truncated to a day or a month by the sensor's query for\
\ partitioned checks"
Please expand the database engine name section to see the SQL query rendered by a Jinja2 template for the distinct_percent sensor.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
CAST(analyzed_table.`date_column` AS DATE) AS time_period,
TIMESTAMP(CAST(analyzed_table.`date_column` AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-%d 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
TRUNC(CAST(original_table."date_column" AS DATE)) AS time_period,
CAST(TRUNC(CAST(original_table."date_column" AS DATE)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
CAST(analyzed_table."date_column" AS date) AS time_period,
CAST((CAST(analyzed_table."date_column" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
CAST(analyzed_table."date_column" AS date) AS time_period,
CAST((CAST(analyzed_table."date_column" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
CAST(analyzed_table."date_column" AS date) AS time_period,
TO_TIMESTAMP(CAST(analyzed_table."date_column" AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
CAST(analyzed_table.[date_column] AS date) AS time_period,
CAST((CAST(analyzed_table.[date_column] AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY CAST(analyzed_table.[date_column] AS date)
ORDER BY CAST(analyzed_table.[date_column] AS date)
Expand the Configure with data grouping section to see additional examples for configuring this data quality check to use data grouping (GROUP BY).
Configuration with data grouping
Sample configuration with data grouping enabled (YAML)
The sample below shows how to configure the data grouping and how it affects the generated SQL query.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
partition_by_column: date_column
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
partitioned_checks:
daily:
uniqueness:
daily_partition_change_distinct_percent_since_30_days:
warning:
max_percent: 5.0
exact_day: false
error:
max_percent: 5.0
exact_day: false
fatal:
max_percent: 5.0
exact_day: false
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
date_column:
labels:
- "date or datetime column used as a daily or monthly partitioning key, dates\
\ (and times) are truncated to a day or a month by the sensor's query for\
\ partitioned checks"
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
Please expand the database engine name section to see the SQL query rendered by a Jinja2 template for the distinct_percent sensor.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
CAST(analyzed_table.`date_column` AS DATE) AS time_period,
TIMESTAMP(CAST(analyzed_table.`date_column` AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-%d 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table.grouping_level_1,
    analyzed_table.grouping_level_2,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
original_table."country" AS grouping_level_1,
original_table."state" AS grouping_level_2,
TRUNC(CAST(original_table."date_column" AS DATE)) AS time_period,
CAST(TRUNC(CAST(original_table."date_column" AS DATE)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(analyzed_table."date_column" AS date) AS time_period,
CAST((CAST(analyzed_table."date_column" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(analyzed_table."date_column" AS date) AS time_period,
CAST((CAST(analyzed_table."date_column" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
CAST(analyzed_table."date_column" AS date) AS time_period,
TO_TIMESTAMP(CAST(analyzed_table."date_column" AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
CAST(analyzed_table.[date_column] AS date) AS time_period,
CAST((CAST(analyzed_table.[date_column] AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[date_column] AS date)
ORDER BY grouping_level_1, grouping_level_2, CAST(analyzed_table.[date_column] AS date)
monthly partition change distinct percent since 30 days
Check description
Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from last month.
Check name | Check type | Time scale | Quality dimension | Sensor definition | Quality rule |
---|---|---|---|---|---|
monthly_partition_change_distinct_percent_since_30_days | partitioned | monthly | Consistency | distinct_percent | change_percent_30_days |
Enable check (Shell)
To enable this check, provide the connection name and the check name in the check enable command.
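For example (assuming, as above, that check enable accepts the same -c and -ch parameters as check run):
dqo> check enable -c=connection_name -ch=monthly_partition_change_distinct_percent_since_30_days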
To run this check, provide the check name in the check run command. It is also possible to run this check for a specific connection and table; to do so, add the connection name and the table name to the command as shown below. To analyze a single column, additionally provide the column name.
dqo> check run -c=connection_name -t=schema_name.table_name -ch=monthly_partition_change_distinct_percent_since_30_days
dqo> check run -c=connection_name -t=schema_name.table_name -col=column_name -ch=monthly_partition_change_distinct_percent_since_30_days
partitioned_checks:
monthly:
uniqueness:
monthly_partition_change_distinct_percent_since_30_days:
warning:
max_percent: 5.0
exact_day: false
error:
max_percent: 5.0
exact_day: false
fatal:
max_percent: 5.0
exact_day: false
The sample schema_name.table_name.dqotable.yaml file with the check configured is shown below.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
partition_by_column: date_column
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
columns:
target_column:
partitioned_checks:
monthly:
uniqueness:
monthly_partition_change_distinct_percent_since_30_days:
warning:
max_percent: 5.0
exact_day: false
error:
max_percent: 5.0
exact_day: false
fatal:
max_percent: 5.0
exact_day: false
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
date_column:
labels:
- "date or datetime column used as a daily or monthly partitioning key, dates\
\ (and times) are truncated to a day or a month by the sensor's query for\
\ partitioned checks"
Please expand the database engine name section to see the SQL query rendered by a Jinja2 template for the distinct_percent sensor.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_TRUNC(CAST(analyzed_table.`date_column` AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(analyzed_table.`date_column` AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-01 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
TRUNC(CAST(original_table."date_column" AS DATE), 'MONTH') AS time_period,
CAST(TRUNC(CAST(original_table."date_column" AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period,
CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)
ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)
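In the monthly variant each engine truncates the partitioning column to the first day of its month (DATE_TRUNC, TRUNC(..., 'MONTH'), or DATEFROMPARTS above), so all rows from one calendar month share a single time_period bucket. A Python equivalent of that truncation (illustrative):
import datetime

def month_start(d: datetime.date) -> datetime.date:
    """Truncate a date to the first day of its month."""
    return d.replace(day=1)

# both rows land in the 2024-03-01 monthly partition
print(month_start(datetime.date(2024, 3, 7)))   # 2024-03-01
print(month_start(datetime.date(2024, 3, 29)))  # 2024-03-01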
Expand the Configure with data grouping section to see additional examples for configuring this data quality check to use data grouping (GROUP BY).
Configuration with data grouping
Sample configuration with data grouping enabled (YAML)
The sample below shows how to configure the data grouping and how it affects the generated SQL query.
# yaml-language-server: $schema=https://cloud.dqops.com/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
timestamp_columns:
event_timestamp_column: col_event_timestamp
ingestion_timestamp_column: col_inserted_at
partition_by_column: date_column
incremental_time_window:
daily_partitioning_recent_days: 7
monthly_partitioning_recent_months: 1
default_grouping_name: group_by_country_and_state
groupings:
group_by_country_and_state:
level_1:
source: column_value
column: country
level_2:
source: column_value
column: state
columns:
target_column:
partitioned_checks:
monthly:
uniqueness:
monthly_partition_change_distinct_percent_since_30_days:
warning:
max_percent: 5.0
exact_day: false
error:
max_percent: 5.0
exact_day: false
fatal:
max_percent: 5.0
exact_day: false
labels:
- This is the column that is analyzed for data quality issues
col_event_timestamp:
labels:
- optional column that stores the timestamp when the event/transaction happened
col_inserted_at:
labels:
- optional column that stores the timestamp when row was ingested
date_column:
labels:
- "date or datetime column used as a daily or monthly partitioning key, dates\
\ (and times) are truncated to a day or a month by the sensor's query for\
\ partitioned checks"
country:
labels:
- column used as the first grouping key
state:
labels:
- column used as the second grouping key
Please expand the database engine name section to see the SQL query rendered by a Jinja2 template for the distinct_percent sensor.
BigQuery
{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_TRUNC(CAST(analyzed_table.`date_column` AS DATE), MONTH) AS time_period,
TIMESTAMP(DATE_TRUNC(CAST(analyzed_table.`date_column` AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
MySQL
{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table.`target_column`) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table.`target_column`) / COUNT(analyzed_table.`target_column`)
END AS actual_value,
analyzed_table.`country` AS grouping_level_1,
analyzed_table.`state` AS grouping_level_2,
DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-01 00:00:00') AS time_period,
FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.`date_column`, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Oracle
{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections_reference('analyzed_table') }}
{{- lib.render_time_dimension_projection_reference('analyzed_table') }}
FROM (
SELECT
original_table.*
{{- lib.render_data_grouping_projections('original_table') }}
{{- lib.render_time_dimension_projection('original_table') }}
FROM {{ lib.render_target_table() }} original_table
{{- lib.render_where_clause(table_alias_prefix='original_table') }}
) analyzed_table
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table.grouping_level_1,
    analyzed_table.grouping_level_2,
time_period,
time_period_utc
FROM (
SELECT
original_table.*,
original_table."country" AS grouping_level_1,
original_table."state" AS grouping_level_2,
TRUNC(CAST(original_table."date_column" AS DATE), 'MONTH') AS time_period,
CAST(TRUNC(CAST(original_table."date_column" AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "<target_schema>"."<target_table>" original_table
) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
PostgreSQL
{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Redshift
{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period,
CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
Snowflake
{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT(analyzed_table."target_column") = 0
THEN 100.0
ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") / COUNT(analyzed_table."target_column")
END AS actual_value,
analyzed_table."country" AS grouping_level_1,
analyzed_table."state" AS grouping_level_2,
DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period,
TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
SQL Server
{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
CASE
WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table')}}) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT {{ lib.render_target_column('analyzed_table') }}) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }})
END AS actual_value
{{- lib.render_data_grouping_projections('analyzed_table') }}
{{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
CASE
WHEN COUNT_BIG(analyzed_table.[target_column]) = 0
THEN 100.0
ELSE 100.0 * COUNT_BIG(DISTINCT analyzed_table.[target_column]) / COUNT_BIG(analyzed_table.[target_column])
END AS actual_value,
analyzed_table.[country] AS grouping_level_1,
analyzed_table.[state] AS grouping_level_2,
DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period,
CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state], DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)
ORDER BY grouping_level_1, grouping_level_2, DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)