sum anomaly differencing checks

Description
Column-level check that ensures that the sum in a monitored column stays within a two-tailed percentile band computed from measurements collected over the last 90 days.
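
The mechanics can be pictured with a short, purely illustrative Python sketch. It is not the DQO implementation: the real anomaly_differencing_percentile_moving_average rule also fits a moving-average model, and the helper name, the even split of anomaly_percent between the two tails, and the sample data below are all assumptions made for this example.

import numpy as np

# Hypothetical helper, not part of DQO: flags the newest sum readout when its
# day-over-day change falls outside a two-tailed percentile band derived from
# the historical changes (up to ~90 days of prior readouts).
def differencing_anomaly(readouts, anomaly_percent=0.1):
    diffs = np.diff(np.asarray(readouts, dtype=float))  # differencing step
    history, current = diffs[:-1], diffs[-1]
    lower = np.percentile(history, anomaly_percent / 2)        # lower tail
    upper = np.percentile(history, 100 - anomaly_percent / 2)  # upper tail
    return not (lower <= current <= upper)

# A sudden jump in an otherwise steadily growing daily sum is flagged.
sums = [100.0 + 0.5 * i for i in range(89)] + [300.0]
print(differencing_anomaly(sums))  # True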


profile sum anomaly differencing

Check description
Verifies that the sum in a column changes at a rate that stays within a percentile boundary computed over the last 90 days.

| Check name | Check type | Time scale | Sensor definition | Quality rule |
|------------|------------|------------|-------------------|--------------|
| profile_sum_anomaly_differencing | profiling | | sum | anomaly_differencing_percentile_moving_average |

Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command:

dqo> check enable -c=connection_name -ch=profile_sum_anomaly_differencing
Run check (Shell)
To run this check, provide the check name in the check run command:
dqo> check run -ch=profile_sum_anomaly_differencing
It is also possible to run this check on a specific connection. To do so, add the connection name to the command below:
dqo> check run -c=connection_name -ch=profile_sum_anomaly_differencing
It is also possible to run this check on a specific table. To do so, add the table name to the command below:
dqo> check run -c=connection_name -t=table_name -ch=profile_sum_anomaly_differencing
It is also possible to run this check on a specific column. To do so, add the column name to the command below:
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=profile_sum_anomaly_differencing
Check structure (Yaml)
      profiling_checks:
        anomaly:
          profile_sum_anomaly_differencing:
            warning:
              anomaly_percent: 0.1
            error:
              anomaly_percent: 0.1
            fatal:
              anomaly_percent: 0.1
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  columns:
    target_column:
      profiling_checks:
        anomaly:
          profile_sum_anomaly_differencing:
            warning:
              anomaly_percent: 0.1
            error:
              anomaly_percent: 0.1
            fatal:
              anomaly_percent: 0.1
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
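
For example, assuming the table above was imported under a connection named your_connection and registered as target_table (both names are placeholders for your own setup), the check could be run for the analyzed column with:

dqo> check run -c=your_connection -t=target_table -col=target_column -ch=profile_sum_anomaly_differencing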

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Oracle

{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections_reference('analyzed_table') }}
     {{- lib.render_time_dimension_projection_reference('analyzed_table') }}
 FROM(
     SELECT
         original_table.*
         {{- lib.render_data_grouping_projections('original_table') }}
         {{- lib.render_time_dimension_projection('original_table') }}
     FROM {{ lib.render_target_table() }} original_table
     {{- lib.render_where_clause(table_alias_prefix='original_table') }}
 ) analyzed_table
 {{- lib.render_group_by() -}}
 {{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    time_period,
    time_period_utc
 FROM(
     SELECT
         original_table.*,
    TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS time_period,
    CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
     FROM "<target_schema>"."<target_table>" original_table
 ) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
    TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.[target_column]) AS actual_value,
    DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
    CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table

Configuration with data grouping


Sample configuration (Yaml)

# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  columns:
    target_column:
      profiling_checks:
        anomaly:
          profile_sum_anomaly_differencing:
            warning:
              anomaly_percent: 0.1
            error:
              anomaly_percent: 0.1
            fatal:
              anomaly_percent: 0.1
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
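
With data grouping enabled, DQO captures a separate sum readout for every unique combination of the country and state columns, and the anomaly rule evaluates each resulting time series on its own. Reusing the hypothetical differencing_anomaly helper from the sketch earlier on this page (the group data below is made up):

# Each (country, state) pair forms an independent series of daily sums.
readouts_by_group = {
    ("US", "CA"): [100.0 + 0.5 * i for i in range(90)],            # steady growth
    ("US", "NY"): [100.0 + 0.5 * i for i in range(89)] + [300.0],  # sudden jump
}
for group, series in readouts_by_group.items():
    print(group, differencing_anomaly(series))  # only ('US', 'NY') is flagged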
BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Oracle

{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections_reference('analyzed_table') }}
     {{- lib.render_time_dimension_projection_reference('analyzed_table') }}
 FROM(
     SELECT
         original_table.*
         {{- lib.render_data_grouping_projections('original_table') }}
         {{- lib.render_time_dimension_projection('original_table') }}
     FROM {{ lib.render_target_table() }} original_table
     {{- lib.render_where_clause(table_alias_prefix='original_table') }}
 ) analyzed_table
 {{- lib.render_group_by() -}}
 {{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table.grouping_level_1,
    analyzed_table.grouping_level_2,
    time_period,
    time_period_utc
 FROM(
     SELECT
         original_table.*,
    original_table."country" AS grouping_level_1,
    original_table."state" AS grouping_level_2,
    TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS time_period,
    CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE), 'MONTH') AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
     FROM "<target_schema>"."<target_table>" original_table
 ) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
    TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.[target_column]) AS actual_value,
    analyzed_table.[country] AS grouping_level_1,
    analyzed_table.[state] AS grouping_level_2,
    DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
    CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY analyzed_table.[country], analyzed_table.[state]

daily sum anomaly differencing

Check description
Verifies that the sum in a column changes at a rate that stays within a percentile boundary computed over the last 90 days.

| Check name | Check type | Time scale | Sensor definition | Quality rule |
|------------|------------|------------|-------------------|--------------|
| daily_sum_anomaly_differencing | recurring | daily | sum | anomaly_differencing_percentile_moving_average |

Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command:

dqo> check enable -c=connection_name -ch=daily_sum_anomaly_differencing
Run check (Shell)
To run this check, provide the check name in the check run command:
dqo> check run -ch=daily_sum_anomaly_differencing
It is also possible to run this check on a specific connection. To do so, add the connection name to the command below:
dqo> check run -c=connection_name -ch=daily_sum_anomaly_differencing
It is also possible to run this check on a specific table. To do so, add the table name to the command below:
dqo> check run -c=connection_name -t=table_name -ch=daily_sum_anomaly_differencing
It is also possible to run this check on a specific column. To do so, add the column name to the command below:
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=daily_sum_anomaly_differencing
Check structure (Yaml)
      recurring_checks:
        daily:
          anomaly:
            daily_sum_anomaly_differencing:
              warning:
                anomaly_percent: 0.1
              error:
                anomaly_percent: 0.1
              fatal:
                anomaly_percent: 0.1
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  columns:
    target_column:
      recurring_checks:
        daily:
          anomaly:
            daily_sum_anomaly_differencing:
              warning:
                anomaly_percent: 0.1
              error:
                anomaly_percent: 0.1
              fatal:
                anomaly_percent: 0.1
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
    TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Oracle

{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections_reference('analyzed_table') }}
     {{- lib.render_time_dimension_projection_reference('analyzed_table') }}
 FROM(
     SELECT
         original_table.*
         {{- lib.render_data_grouping_projections('original_table') }}
         {{- lib.render_time_dimension_projection('original_table') }}
     FROM {{ lib.render_target_table() }} original_table
     {{- lib.render_where_clause(table_alias_prefix='original_table') }}
 ) analyzed_table
 {{- lib.render_group_by() -}}
 {{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    time_period,
    time_period_utc
 FROM(
     SELECT
         original_table.*,
    TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS time_period,
    CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
     FROM "<target_schema>"."<target_table>" original_table
 ) analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
    TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.[target_column]) AS actual_value,
    CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
    CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table

Configuration with data grouping


Sample configuration (Yaml)

# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  columns:
    target_column:
      recurring_checks:
        daily:
          anomaly:
            daily_sum_anomaly_differencing:
              warning:
                anomaly_percent: 0.1
              error:
                anomaly_percent: 0.1
              fatal:
                anomaly_percent: 0.1
      labels:
      - This is the column that is analyzed for data quality issues
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
    TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.`target_column`) AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Oracle

{% import '/dialects/oracle.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections_reference('analyzed_table') }}
     {{- lib.render_time_dimension_projection_reference('analyzed_table') }}
 FROM(
     SELECT
         original_table.*
         {{- lib.render_data_grouping_projections('original_table') }}
         {{- lib.render_time_dimension_projection('original_table') }}
     FROM {{ lib.render_target_table() }} original_table
     {{- lib.render_where_clause(table_alias_prefix='original_table') }}
 ) analyzed_table
 {{- lib.render_group_by() -}}
 {{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table.grouping_level_1,
    analyzed_table.grouping_level_2,
    time_period,
    time_period_utc
 FROM(
     SELECT
         original_table.*,
    original_table."country" AS grouping_level_1,
    original_table."state" AS grouping_level_2,
    TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS time_period,
    CAST(TRUNC(CAST(CURRENT_TIMESTAMP AS DATE)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
     FROM "<target_schema>"."<target_table>" original_table
 ) analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table."target_column") AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
    TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}
SELECT
    SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    SUM(analyzed_table.[target_column]) AS actual_value,
    analyzed_table.[country] AS grouping_level_1,
    analyzed_table.[state] AS grouping_level_2,
    CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
    CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY analyzed_table.[country], analyzed_table.[state]