data staleness

data staleness checks

Description
A table-level check that calculates the time difference between the timestamp of the most recent data load into a table and the current time. This check can be used only when the data pipeline, an ETL process, or a trigger in the data warehouse fills an extra column with the timestamp of the data loading job. The ingestion column used for the comparison is defined as timestamp_columns.ingestion_timestamp_column in the table configuration. This check is also known as "Data Staleness".
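
In other words, the sensor reads the maximum value of the ingestion timestamp column and measures how much time has passed since that moment, expressed in days. The query below is a minimal sketch of that idea, assuming a PostgreSQL-style dialect and a hypothetical orders table with an inserted_at ingestion column; the dialect-specific templates that DQO actually renders are listed further down.

-- Illustration only: staleness (in days) of a hypothetical "orders" table
-- whose ingestion timestamp column is "inserted_at" (PostgreSQL-style syntax).
SELECT
    EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(inserted_at))) / 24.0 / 3600.0 AS staleness_in_days
FROM orders;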


profile data staleness

Check description
Calculates the time difference in days between the current date and the most recent data ingestion timestamp (staleness)

| Check name | Check type | Time scale | Sensor definition | Quality rule |
|------------|------------|------------|-------------------|--------------|
| profile_data_staleness | profiling | | data_staleness | max_days |

Enable check (Shell)
To enable this check, provide the connection name and the check name in the check enable command.

dqo> check enable -c=connection_name -ch=profile_data_staleness
Run check (Shell)
To run this check, provide the check name in the check run command.
dqo> check run -ch=profile_data_staleness
It is also possible to run this check on a specific connection. To do this, add the connection name to the command below.
dqo> check run -c=connection_name -ch=profile_data_staleness
You can also run this check on a specific table. To do this, add the table name to the command below.
dqo> check run -c=connection_name -t=table_name -ch=profile_data_staleness
It is also possible to run this check on a specific column. To do this, add the column name to the command below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=profile_data_staleness
Check structure (Yaml)
  profiling_checks:
    timeliness:
      profile_data_staleness:
        warning:
          max_days: 1.0
        error:
          max_days: 2.0
        fatal:
          max_days: 1.0
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  profiling_checks:
    timeliness:
      profile_data_staleness:
        warning:
          max_days: 1.0
        error:
          max_days: 2.0
        fatal:
          max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
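
Each severity level (warning, error, fatal) has its own max_days threshold, and the max_days rule raises that severity when the measured staleness exceeds the threshold. The query below is a rough illustration of that comparison written as plain SQL; it is not DQO's rule engine, just a hypothetical PostgreSQL-style query that classifies the staleness of an assumed orders table against example thresholds of 1.0 and 2.0 days.

-- Illustration only (not the DQO rule engine): classify the measured staleness
-- against example max_days thresholds of 1.0 (warning) and 2.0 (error).
SELECT
    staleness_in_days,
    CASE
        WHEN staleness_in_days > 2.0 THEN 'error'
        WHEN staleness_in_days > 1.0 THEN 'warning'
        ELSE 'valid'
    END AS severity
FROM (
    SELECT
        EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 AS staleness_in_days
    FROM orders
) AS staleness;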

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
    TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
    CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table

Configuration with data grouping


Sample configuration (Yaml)

# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  profiling_checks:
    timeliness:
      profile_data_staleness:
        warning:
          max_days: 1.0
        error:
          max_days: 2.0
        fatal:
          max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
    TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.[country] AS grouping_level_1,
    analyzed_table.[state] AS grouping_level_2,
    DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
    CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY level_1, level_2

daily data staleness

Check description
Calculates, on a daily basis, the time difference in days between the current date and the most recent data ingestion timestamp (staleness).

| Check name | Check type | Time scale | Sensor definition | Quality rule |
|------------|------------|------------|-------------------|--------------|
| daily_data_staleness | recurring | daily | data_staleness | max_days |

Enable check (Shell)
To enable this check, provide the connection name and the check name in the check enable command.

dqo> check enable -c=connection_name -ch=daily_data_staleness
Run check (Shell)
To run this check, provide the check name in the check run command.
dqo> check run -ch=daily_data_staleness
It is also possible to run this check on a specific connection. To do this, add the connection name to the command below.
dqo> check run -c=connection_name -ch=daily_data_staleness
You can also run this check on a specific table. To do this, add the table name to the command below.
dqo> check run -c=connection_name -t=table_name -ch=daily_data_staleness
It is also possible to run this check on a specific column. To do this, add the column name to the command below.
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=daily_data_staleness
Check structure (Yaml)
  recurring_checks:
    daily:
      timeliness:
        daily_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  recurring_checks:
    daily:
      timeliness:
        daily_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
    TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
    TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
    CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table

Configuration with data grouping


Sample configuration (Yaml)

# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  recurring_checks:
    daily:
      timeliness:
        daily_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    CAST(CURRENT_TIMESTAMP() AS DATE) AS time_period,
    TIMESTAMP(CAST(CURRENT_TIMESTAMP() AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(LOCALTIMESTAMP AS date) AS time_period,
    CAST((CAST(LOCALTIMESTAMP AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date) AS time_period,
    TO_TIMESTAMP(CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.[country] AS grouping_level_1,
    analyzed_table.[state] AS grouping_level_2,
    CAST(SYSDATETIMEOFFSET() AS date) AS time_period,
    CAST((CAST(SYSDATETIMEOFFSET() AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY grouping_level_1, grouping_level_2

monthly data staleness

Check description
Monthly recurring check calculating the time difference in days between the current date and the most recent data ingestion timestamp (staleness)

Check name Check type Time scale Sensor definition Quality rule
monthly_data_staleness recurring monthly data_staleness max_days
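
The rendered sensor queries below return this time difference as a number of days in the actual_value column, and the max_days rule compares that value against the warning, error and fatal thresholds. As a rough, hand-written illustration (not a query generated by DQO), the following PostgreSQL snippet computes the same measure for a hypothetical "your_schema"."sales" table using the col_inserted_at ingestion column, and classifies the result against the sample thresholds shown below (warning above 1.0 day, error above 2.0 days):

-- Illustration only: manual staleness calculation and a simple severity
-- classification; the schema and table names are placeholders.
SELECT
    EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 AS staleness_days,
    CASE
        WHEN EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 > 2.0 THEN 'error'
        WHEN EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 > 1.0 THEN 'warning'
        ELSE 'valid'
    END AS severity
FROM "your_schema"."sales";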

Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command

dqo> check enable -c=connection_name -ch=monthly_data_staleness
Run check (Shell)
To run this check, provide the check name in the check run command
dqo> check run -ch=monthly_data_staleness
It is also possible to run this check on a specific connection. To do so, add the connection name to the command below
dqo> check run -c=connection_name -ch=monthly_data_staleness
It is also possible to run this check on a specific table. To do so, add the table name to the command below
dqo> check run -c=connection_name -t=table_name -ch=monthly_data_staleness
It is also possible to run this check on a specific column. To do so, add the column name to the command below
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=monthly_data_staleness
Check structure (Yaml)
  recurring_checks:
    monthly:
      timeliness:
        monthly_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  recurring_checks:
    monthly:
      timeliness:
        monthly_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
    TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
    CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table

Configuration with data grouping


Sample configuration (Yaml)

# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  recurring_checks:
    monthly:
      timeliness:
        monthly_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
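
With this grouping configuration, the generated queries project one extra column per grouping level (grouping_level_1 from country, grouping_level_2 from state) and add both to the GROUP BY clause, so a separate staleness value is calculated for every country/state combination. A minimal hand-written PostgreSQL sketch of that shape (not DQO output; the schema and table names are placeholders):

-- Illustration only: one staleness value per country/state group.
SELECT
    country AS grouping_level_1,
    state   AS grouping_level_2,
    EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 AS actual_value
FROM "your_schema"."sales"
GROUP BY country, state
ORDER BY country, state;
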
BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(CURRENT_TIMESTAMP() AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(LOCALTIMESTAMP, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(LOCALTIMESTAMP AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date)) AS time_period,
    TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(TO_TIMESTAMP_NTZ(LOCALTIMESTAMP()) AS date))) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.[country] AS grouping_level_1,
    analyzed_table.[state] AS grouping_level_2,
    DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0) AS time_period,
    CAST((DATEADD(month, DATEDIFF(month, 0, SYSDATETIMEOFFSET()), 0)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state]
ORDER BY grouping_level_1, grouping_level_2

daily partition data staleness

Check description
Daily partitioned check calculating the time difference in days between the current date and the most recent data ingestion timestamp (staleness)

Check name Check type Time scale Sensor definition Quality rule
daily_partition_data_staleness partitioned daily data_staleness max_days
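
For the partitioned variant, the time_period column comes from the date column configured for daily partitioning; the sample configuration below does not specify one, which is why the rendered queries further down contain an empty column reference (for example analyzed_table.""). As a rough sketch of what a fully configured query could look like, assuming (hypothetically) that col_event_timestamp is used as the partitioning column and using PostgreSQL syntax with a placeholder table name (not DQO output):

-- Illustration only: staleness evaluated per daily partition.
SELECT
    CAST(col_event_timestamp AS date) AS time_period,
    EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 AS actual_value
FROM "your_schema"."sales"
GROUP BY CAST(col_event_timestamp AS date)
ORDER BY time_period;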

Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command

dqo> check enable -c=connection_name -ch=daily_partition_data_staleness
Run check (Shell)
To run this check, provide the check name in the check run command
dqo> check run -ch=daily_partition_data_staleness
It is also possible to run this check on a specific connection. To do so, add the connection name to the command below
dqo> check run -c=connection_name -ch=daily_partition_data_staleness
It is also possible to run this check on a specific table. To do so, add the table name to the command below
dqo> check run -c=connection_name -t=table_name -ch=daily_partition_data_staleness
It is also possible to run this check on a specific column. To do so, add the column name to the command below
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=daily_partition_data_staleness
Check structure (Yaml)
  partitioned_checks:
    daily:
      timeliness:
        daily_partition_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  partitioned_checks:
    daily:
      timeliness:
        daily_partition_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    CAST(analyzed_table.`` AS DATE) AS time_period,
    TIMESTAMP(CAST(analyzed_table.`` AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    DATE_FORMAT(analyzed_table.``, '%Y-%m-%d 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.``, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    CAST(analyzed_table."" AS date) AS time_period,
    CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    CAST(analyzed_table."" AS date) AS time_period,
    CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    CAST(analyzed_table."" AS date) AS time_period,
    TO_TIMESTAMP(CAST(analyzed_table."" AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    CAST(analyzed_table.[] AS date) AS time_period,
    CAST((CAST(analyzed_table.[] AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY CAST(analyzed_table.[] AS date), CAST(analyzed_table.[] AS date)
ORDER BY CAST(analyzed_table.[] AS date)

Configuration with data grouping


Sample configuration (Yaml)

# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  default_grouping_name: group_by_country_and_state
  groupings:
    group_by_country_and_state:
      level_1:
        source: column_value
        column: country
      level_2:
        source: column_value
        column: state
  partitioned_checks:
    daily:
      timeliness:
        daily_partition_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested
    country:
      labels:
      - column used as the first grouping key
    state:
      labels:
      - column used as the second grouping key
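
When data grouping is combined with daily partitioning, the grouping levels and the partition date all become GROUP BY keys, so a staleness value is reported per country, state and day. A brief hand-written PostgreSQL sketch of that combined shape (not DQO output; placeholder table name and a hypothetical col_event_timestamp partitioning column):

-- Illustration only: grouping levels plus the daily partition as GROUP BY keys.
SELECT
    country AS grouping_level_1,
    state   AS grouping_level_2,
    CAST(col_event_timestamp AS date) AS time_period,
    EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - MAX(col_inserted_at))) / 24.0 / 3600.0 AS actual_value
FROM "your_schema"."sales"
GROUP BY country, state, CAST(col_event_timestamp AS date)
ORDER BY grouping_level_1, grouping_level_2, time_period;
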
BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    CAST(analyzed_table.`` AS DATE) AS time_period,
    TIMESTAMP(CAST(analyzed_table.`` AS DATE)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.`country` AS grouping_level_1,
    analyzed_table.`state` AS grouping_level_2,
    DATE_FORMAT(analyzed_table.``, '%Y-%m-%d 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.``, '%Y-%m-%d 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(analyzed_table."" AS date) AS time_period,
    CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(analyzed_table."" AS date) AS time_period,
    CAST((CAST(analyzed_table."" AS date)) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX(
            TRY_TO_TIMESTAMP(analyzed_table."col_inserted_at")
        ),
        CURRENT_TIMESTAMP
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    analyzed_table."country" AS grouping_level_1,
    analyzed_table."state" AS grouping_level_2,
    CAST(analyzed_table."" AS date) AS time_period,
    TO_TIMESTAMP(CAST(analyzed_table."" AS date)) AS time_period_utc
FROM "your_snowflake_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc

SQL Server

{% import '/dialects/sqlserver.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(DAY,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            GETDATE()
        ) / 24.0 / 3600.0
    {%- else -%}
        DATEDIFF(SECOND,
            MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    DATEDIFF(SECOND,
            MAX(analyzed_table.[col_inserted_at]),
            SYSDATETIMEOFFSET()
        ) / 24.0 / 3600.0 AS actual_value,
    analyzed_table.[country] AS grouping_level_1,
    analyzed_table.[state] AS grouping_level_2,
    CAST(analyzed_table.[] AS date) AS time_period,
    CAST((CAST(analyzed_table.[] AS date)) AS DATETIME) AS time_period_utc
FROM [your_sql_server_database].[<target_schema>].[<target_table>] AS analyzed_table
GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[] AS date), CAST(analyzed_table.[] AS date)
ORDER BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[] AS date)

monthly partition data staleness

Check description
Monthly partitioned check calculating the time difference in days between the current date and the most recent data ingestion timestamp (staleness). The sketch after the check summary table below illustrates how the max_days thresholds are applied to this value.

Check name Check type Time scale Sensor definition Quality rule
monthly_partition_data_staleness partitioned monthly data_staleness max_days
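
The max_days rule classifies the computed staleness (the actual_value returned by the sensor, expressed in days) by comparing it against the warning, error and fatal thresholds. The following Python sketch only illustrates that comparison under assumed threshold values; it is not the DQOps rule implementation, and the function name and threshold values are hypothetical.

# Illustrative sketch only - not the DQOps max_days rule source code.
# classify_staleness and the threshold values below are hypothetical.
def classify_staleness(actual_days: float, warning: float, error: float, fatal: float) -> str:
    """Return the highest severity whose max_days threshold is exceeded."""
    if actual_days > fatal:
        return "fatal"
    if actual_days > error:
        return "error"
    if actual_days > warning:
        return "warning"
    return "valid"

# Example: data last ingested 1.5 days ago.
print(classify_staleness(1.5, warning=1.0, error=2.0, fatal=3.0))  # -> "warning"

With thresholds ordered warning < error < fatal, a staleness of 1.5 days exceeds only the warning threshold, so the check would raise a warning-severity issue.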

Enable check (Shell)
To enable this check, provide the connection name and check name in the check enable command

dqo> check enable -c=connection_name -ch=monthly_partition_data_staleness
Run check (Shell)
To run this check, provide the check name in the check run command
dqo> check run -ch=monthly_partition_data_staleness
It is also possible to run this check on a specific connection. To do this, add the connection name to the command below
dqo> check run -c=connection_name -ch=monthly_partition_data_staleness
The check can also be limited to a specific table. To do this, add the table name to the command below
dqo> check run -c=connection_name -t=table_name -ch=monthly_partition_data_staleness
The check can also target a specific column. To do this, add the column name to the command below
dqo> check run -c=connection_name -t=table_name -col=column_name -ch=monthly_partition_data_staleness
Check structure (Yaml)
  partitioned_checks:
    monthly:
      timeliness:
        monthly_partition_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
Sample configuration (Yaml)
# yaml-language-server: $schema=https://cloud.dqo.ai/dqo-yaml-schema/TableYaml-schema.json
apiVersion: dqo/v1
kind: table
spec:
  timestamp_columns:
    event_timestamp_column: col_event_timestamp
    ingestion_timestamp_column: col_inserted_at
  incremental_time_window:
    daily_partitioning_recent_days: 7
    monthly_partitioning_recent_months: 1
  partitioned_checks:
    monthly:
      timeliness:
        monthly_partition_data_staleness:
          warning:
            max_days: 1.0
          error:
            max_days: 2.0
          fatal:
            max_days: 1.0
  columns:
    col_event_timestamp:
      labels:
      - optional column that stores the timestamp when the event/transaction happened
    col_inserted_at:
      labels:
      - optional column that stores the timestamp when row was ingested

BigQuery

{% import '/dialects/bigquery.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATE_DIFF(
        CURRENT_DATE(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        DAY
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATETIME_DIFF(
        CURRENT_DATETIME(),
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- else -%}
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMP_DIFF(
        CURRENT_TIMESTAMP(),
        MAX(
            SAFE_CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)
        ),
        MILLISECOND
    ) / 24.0 / 3600.0 / 1000.0 AS actual_value,
    DATE_TRUNC(CAST(analyzed_table.`` AS DATE), MONTH) AS time_period,
    TIMESTAMP(DATE_TRUNC(CAST(analyzed_table.`` AS DATE), MONTH)) AS time_period_utc
FROM `your-google-project-id`.`<target_schema>`.`<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

MySQL

{% import '/dialects/mysql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    DATEDIFF(
        DAY,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_DATE()
    )
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- else -%}
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    TIMESTAMPDIFF(
        SECOND,
        MAX(CAST(analyzed_table.`col_inserted_at` AS TIMESTAMP)),
        CURRENT_TIMESTAMP()
    ) / 24.0 / 3600.0 AS actual_value,
    DATE_FORMAT(analyzed_table.``, '%Y-%m-01 00:00:00') AS time_period,
    FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(analyzed_table.``, '%Y-%m-01 00:00:00'))) AS time_period_utc
FROM `<target_table>` AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

PostgreSQL

{% import '/dialects/postgresql.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_postgresql_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Redshift

{% import '/dialects/redshift.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
        CURRENT_DATE - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})
    )) / 24.0 / 3600.0
    {%- else -%}
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX(({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }})::TIMESTAMP)
    )) / 24.0 / 3600.0
    {%- endif -%}
{%- endmacro -%}

SELECT
    {{ render_current_ingestion_diff() }} AS actual_value
    {{- lib.render_data_grouping_projections('analyzed_table') }}
    {{- lib.render_time_dimension_projection('analyzed_table') }}
FROM {{ lib.render_target_table() }} AS analyzed_table
{{- lib.render_where_clause() -}}
{{- lib.render_group_by() -}}
{{- lib.render_order_by() -}}
SELECT
    EXTRACT(EPOCH FROM (
        CURRENT_TIMESTAMP - MAX((analyzed_table."col_inserted_at")::TIMESTAMP)
    )) / 24.0 / 3600.0 AS actual_value,
    DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date)) AS time_period,
    CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc
FROM "your_redshift_database"."<target_schema>"."<target_table>" AS analyzed_table
GROUP BY time_period, time_period_utc
ORDER BY time_period, time_period_utc

Snowflake

{% import '/dialects/snowflake.sql.jinja2' as lib with context -%}

{% macro render_current_ingestion_diff() -%}
    {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%}
    TIMESTAMPDIFF(
        MILLISECOND,
        MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column,