Last updated: July 22, 2025
DQOps REST API connections operations
Operations for adding/updating/deleting the configuration of data sources managed by DQOps.
bulk_activate_connection_checks
Activates all named checks on this connection in the locations specified by the filter
Follow the link to see the source code on GitHub.
PUT http://localhost:8888/api/connections/{connectionName}/checks/{checkName}/bulkactivate
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
check_name | Check name | string | |
Request body
Description | Data type | Required |
---|---|---|
Check search filters and rules configuration | AllChecksPatchParameters | |
Usage examples
Execution
curl -X PUT http://localhost:8888/api/connections/sample_connection/checks/sample_check/bulkactivate^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"check_search_filters\":{\"connection\":\"sample_connection\",\"fullTableName\":\"sample_schema.sample_table\",\"enabled\":true,\"column\":\"sample_column\",\"columnDataType\":\"string\"},\"check_model_patch\":{\"check_name\":\"sample_check\",\"help_text\":\"Sample help text\",\"sensor_parameters\":[],\"sensor_name\":\"sample_target/sample_category/table/volume/row_count\",\"quality_dimension\":\"sample_quality_dimension\",\"supports_error_sampling\":false,\"supports_grouping\":false,\"default_severity\":\"error\",\"disabled\":false,\"exclude_from_kpi\":false,\"include_in_sla\":false,\"configured\":false,\"can_edit\":false,\"can_run_checks\":false,\"can_delete_data\":false},\"override_conflicts\":true}"
Execution
from dqops import client
from dqops.client.api.connections import bulk_activate_connection_checks
from dqops.client.models import AllChecksPatchParameters, \
CheckModel, \
CheckSearchFilters, \
DefaultRuleSeverityLevel, \
FieldModel
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = AllChecksPatchParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
),
check_model_patch=CheckModel(
check_name='sample_check',
help_text='Sample help text',
sensor_parameters=[
],
sensor_name='sample_target/sample_category/table/volume/row_count',
quality_dimension='sample_quality_dimension',
supports_error_sampling=False,
supports_grouping=False,
standard=False,
default_check=False,
default_severity=DefaultRuleSeverityLevel.ERROR,
disabled=False,
exclude_from_kpi=False,
include_in_sla=False,
configured=False,
always_collect_error_samples=False,
do_not_schedule=False,
can_edit=False,
can_run_checks=False,
can_delete_data=False
),
override_conflicts=True
)
call_result = bulk_activate_connection_checks.sync(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import bulk_activate_connection_checks
from dqops.client.models import AllChecksPatchParameters, \
CheckModel, \
CheckSearchFilters, \
DefaultRuleSeverityLevel, \
FieldModel
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = AllChecksPatchParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
),
check_model_patch=CheckModel(
check_name='sample_check',
help_text='Sample help text',
sensor_parameters=[
],
sensor_name='sample_target/sample_category/table/volume/row_count',
quality_dimension='sample_quality_dimension',
supports_error_sampling=False,
supports_grouping=False,
standard=False,
default_check=False,
default_severity=DefaultRuleSeverityLevel.ERROR,
disabled=False,
exclude_from_kpi=False,
include_in_sla=False,
configured=False,
always_collect_error_samples=False,
do_not_schedule=False,
can_edit=False,
can_run_checks=False,
can_delete_data=False
),
override_conflicts=True
)
call_result = await bulk_activate_connection_checks.asyncio(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import bulk_activate_connection_checks
from dqops.client.models import AllChecksPatchParameters, \
CheckModel, \
CheckSearchFilters, \
DefaultRuleSeverityLevel, \
FieldModel
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = AllChecksPatchParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
),
check_model_patch=CheckModel(
check_name='sample_check',
help_text='Sample help text',
sensor_parameters=[
],
sensor_name='sample_target/sample_category/table/volume/row_count',
quality_dimension='sample_quality_dimension',
supports_error_sampling=False,
supports_grouping=False,
standard=False,
default_check=False,
default_severity=DefaultRuleSeverityLevel.ERROR,
disabled=False,
exclude_from_kpi=False,
include_in_sla=False,
configured=False,
always_collect_error_samples=False,
do_not_schedule=False,
can_edit=False,
can_run_checks=False,
can_delete_data=False
),
override_conflicts=True
)
call_result = bulk_activate_connection_checks.sync(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import bulk_activate_connection_checks
from dqops.client.models import AllChecksPatchParameters, \
CheckModel, \
CheckSearchFilters, \
DefaultRuleSeverityLevel, \
FieldModel
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = AllChecksPatchParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
),
check_model_patch=CheckModel(
check_name='sample_check',
help_text='Sample help text',
sensor_parameters=[
],
sensor_name='sample_target/sample_category/table/volume/row_count',
quality_dimension='sample_quality_dimension',
supports_error_sampling=False,
supports_grouping=False,
standard=False,
default_check=False,
default_severity=DefaultRuleSeverityLevel.ERROR,
disabled=False,
exclude_from_kpi=False,
include_in_sla=False,
configured=False,
always_collect_error_samples=False,
do_not_schedule=False,
can_edit=False,
can_run_checks=False,
can_delete_data=False
),
override_conflicts=True
)
call_result = await bulk_activate_connection_checks.asyncio(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
bulk_deactivate_connection_checks
Deactivates (deletes) all named checks on this connection in the locations specified by the filter
Follow the link to see the source code on GitHub.
PUT http://localhost:8888/api/connections/{connectionName}/checks/{checkName}/bulkdeactivate
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
check_name | Check name | string | |
Request body
Description | Data type | Required |
---|---|---|
Check search filters and table/column selectors. | BulkCheckDeactivateParameters | |
Usage examples
Execution
curl -X PUT http://localhost:8888/api/connections/sample_connection/checks/sample_check/bulkdeactivate^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"check_search_filters\":{\"connection\":\"sample_connection\",\"fullTableName\":\"sample_schema.sample_table\",\"enabled\":true,\"column\":\"sample_column\",\"columnDataType\":\"string\"}}"
Execution
from dqops import client
from dqops.client.api.connections import bulk_deactivate_connection_checks
from dqops.client.models import BulkCheckDeactivateParameters, \
CheckSearchFilters
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = BulkCheckDeactivateParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
)
)
call_result = bulk_deactivate_connection_checks.sync(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import bulk_deactivate_connection_checks
from dqops.client.models import BulkCheckDeactivateParameters, \
CheckSearchFilters
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = BulkCheckDeactivateParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
)
)
call_result = await bulk_deactivate_connection_checks.asyncio(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import bulk_deactivate_connection_checks
from dqops.client.models import BulkCheckDeactivateParameters, \
CheckSearchFilters
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = BulkCheckDeactivateParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
)
)
call_result = bulk_deactivate_connection_checks.sync(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import bulk_deactivate_connection_checks
from dqops.client.models import BulkCheckDeactivateParameters, \
CheckSearchFilters
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = BulkCheckDeactivateParameters(
check_search_filters=CheckSearchFilters(
column='sample_column',
column_data_type='string',
connection='sample_connection',
full_table_name='sample_schema.sample_table',
enabled=True
)
)
call_result = await bulk_deactivate_connection_checks.asyncio(
'sample_connection',
'sample_check',
client=dqops_client,
json_body=request_body
)
create_connection
Creates a new connection
Follow the link to see the source code on GitHub.
POST http://localhost:8888/api/connections/{connectionName}
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Request body
Description | Data type | Required |
---|---|---|
Connection specification | ConnectionSpec | |
Usage examples
Execution
curl -X POST http://localhost:8888/api/connections/sample_connection^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"provider_type\":\"postgresql\",\"postgresql\":{\"host\":\"localhost\",\"port\":\"5432\",\"database\":\"db\",\"user\":\"PASSWD\",\"sslmode\":\"disable\"},\"parallel_jobs_limit\":4,\"incident_grouping\":{\"grouping_level\":\"table_dimension_category\",\"minimum_severity\":\"warning\",\"max_incident_length_days\":60,\"mute_for_days\":60}}"
Execution
from dqops import client
from dqops.client.api.connections import create_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = create_connection.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import create_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = await create_connection.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import create_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = create_connection.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import create_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = await create_connection.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
create_connection_basic
Creates a new connection given the basic information.
Follow the link to see the source code on GitHub.
POST http://localhost:8888/api/connections/{connectionName}/basic
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Request body
Description | Data type | Required |
---|---|---|
Basic connection model | ConnectionModel | |
Usage examples
Execution
curl -X POST http://localhost:8888/api/connections/sample_connection/basic^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"connection_name\":\"sample_connection\",\"parallel_jobs_limit\":4,\"provider_type\":\"postgresql\",\"postgresql\":{\"host\":\"localhost\",\"port\":\"5432\",\"database\":\"db\",\"user\":\"PASSWD\",\"sslmode\":\"disable\"},\"run_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true},\"run_profiling_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"profiling\"},\"run_monitoring_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"monitoring\"},\"run_partition_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"partitioned\"},\"collect_statistics_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"columnNames\":[]},\"data_clean_job_template\":{\"connection\":\"sample_connection\",\"deleteErrors\":true,\"deleteStatistics\":true,\"deleteCheckResults\":true,\"deleteSensorReadouts\":true,\"deleteErrorSamples\":true,\"deleteIncidents\":true,\"deleteChecksConfiguration\":false},\"advanced_properties\":{},\"can_edit\":false,\"can_collect_statistics\":true,\"can_run_checks\":true,\"can_delete_data\":true}"
Execution
from dqops import client
from dqops.client.api.connections import create_connection_basic
from dqops.client.models import CheckSearchFilters, \
CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = create_connection_basic.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import create_connection_basic
from dqops.client.models import CheckSearchFilters, \
CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = await create_connection_basic.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import create_connection_basic
from dqops.client.models import CheckSearchFilters, \
CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = create_connection_basic.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import create_connection_basic
from dqops.client.models import CheckSearchFilters, \
CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = await create_connection_basic.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
delete_connection
Deletes a connection
Follow the link to see the source code on GitHub.
DELETE http://localhost:8888/api/connections/{connectionName}
Return value
Property name | Description | Data type |
---|---|---|
dqo_queue_job_id | | DqoQueueJobId |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
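The curl example was collapsed on this page; the following is a reconstructed sketch that follows the pattern of the other operations in this reference, with the endpoint path inferred from the create_connection example:
curl -X DELETE http://localhost:8888/api/connections/sample_connection^
-H "Accept: application/json"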
Execution
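A sketch of the synchronous Python call, assuming delete_connection is exposed from dqops.client.api.connections in the same way as the other operations on this page:
from dqops import client
from dqops.client.api.connections import delete_connection
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
# Queues the connection removal job; per the return value table, a DqoQueueJobId is returned.
call_result = delete_connection.sync(
    'sample_connection',
    client=dqops_client
)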
Execution
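A sketch of the asynchronous variant, under the same assumption about the generated client module:
from dqops import client
from dqops.client.api.connections import delete_connection
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
# Queues the connection removal job; per the return value table, a DqoQueueJobId is returned.
call_result = await delete_connection.asyncio(
    'sample_connection',
    client=dqops_client
)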
Execution
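A sketch of the authenticated call, mirroring the AuthenticatedClient pattern used by the other operations on this page:
from dqops import client
from dqops.client.api.connections import delete_connection
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token,
    raise_on_unexpected_status=True
)
call_result = delete_connection.sync(
    'sample_connection',
    client=dqops_client
)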
get_all_connections
Returns a list of connections (data sources)
Follow the link to see the source code on GitHub.
GET http://localhost:8888/api/connections
Return value
Property name | Description | Data type |
---|---|---|
connection_model | | List[ConnectionModel] |
Usage examples
Execution
The returned result:
[ {
"connection_name" : "sample_connection",
"parallel_jobs_limit" : 4,
"provider_type" : "postgresql",
"postgresql" : {
"host" : "localhost",
"port" : "5432",
"database" : "db",
"user" : "PASSWD",
"sslmode" : "disable"
},
"run_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true
},
"run_profiling_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "profiling"
},
"run_monitoring_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "monitoring"
},
"run_partition_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "partitioned"
},
"collect_statistics_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"columnNames" : [ ]
},
"data_clean_job_template" : {
"connection" : "sample_connection",
"deleteErrors" : true,
"deleteStatistics" : true,
"deleteCheckResults" : true,
"deleteSensorReadouts" : true,
"deleteErrorSamples" : true,
"deleteIncidents" : true,
"deleteChecksConfiguration" : false
},
"advanced_properties" : { },
"can_edit" : false,
"can_collect_statistics" : true,
"can_run_checks" : true,
"can_delete_data" : true
}, {
"connection_name" : "sample_connection",
"parallel_jobs_limit" : 4,
"provider_type" : "postgresql",
"postgresql" : {
"host" : "localhost",
"port" : "5432",
"database" : "db",
"user" : "PASSWD",
"sslmode" : "disable"
},
"run_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true
},
"run_profiling_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "profiling"
},
"run_monitoring_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "monitoring"
},
"run_partition_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "partitioned"
},
"collect_statistics_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"columnNames" : [ ]
},
"data_clean_job_template" : {
"connection" : "sample_connection",
"deleteErrors" : true,
"deleteStatistics" : true,
"deleteCheckResults" : true,
"deleteSensorReadouts" : true,
"deleteErrorSamples" : true,
"deleteIncidents" : true,
"deleteChecksConfiguration" : false
},
"advanced_properties" : { },
"can_edit" : false,
"can_collect_statistics" : true,
"can_run_checks" : true,
"can_delete_data" : true
}, {
"connection_name" : "sample_connection",
"parallel_jobs_limit" : 4,
"provider_type" : "postgresql",
"postgresql" : {
"host" : "localhost",
"port" : "5432",
"database" : "db",
"user" : "PASSWD",
"sslmode" : "disable"
},
"run_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true
},
"run_profiling_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "profiling"
},
"run_monitoring_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "monitoring"
},
"run_partition_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "partitioned"
},
"collect_statistics_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"columnNames" : [ ]
},
"data_clean_job_template" : {
"connection" : "sample_connection",
"deleteErrors" : true,
"deleteStatistics" : true,
"deleteCheckResults" : true,
"deleteSensorReadouts" : true,
"deleteErrorSamples" : true,
"deleteIncidents" : true,
"deleteChecksConfiguration" : false
},
"advanced_properties" : { },
"can_edit" : false,
"can_collect_statistics" : true,
"can_run_checks" : true,
"can_delete_data" : true
} ]
Execution
from dqops import client
from dqops.client.api.connections import get_all_connections
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = get_all_connections.sync(
client=dqops_client
)
The returned result:
[
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
]
Execution
from dqops import client
from dqops.client.api.connections import get_all_connections
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = await get_all_connections.asyncio(
client=dqops_client
)
The returned result:
[
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
]
Execution
from dqops import client
from dqops.client.api.connections import get_all_connections
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_all_connections.sync(
client=dqops_client
)
The returned result:
[
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
]
Execution
from dqops import client
from dqops.client.api.connections import get_all_connections
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_all_connections.asyncio(
client=dqops_client
)
The returned result:
[
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
),
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
]
get_connection
Return the full details of a connection given the connection name
Follow the link to see the source code on GitHub.
GET http://localhost:8888/api/connections/{connectionName}
Return value
Property name | Description | Data type |
---|---|---|
connection_specification_model | | ConnectionSpecificationModel |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
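Reconstructed sketch of the collapsed curl example, following the pattern of the other GET operations on this page:
curl http://localhost:8888/api/connections/sample_connection^
-H "Accept: application/json"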
Execution
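A sketch of the unauthenticated synchronous call, mirroring the authenticated example below without the token:
from dqops import client
from dqops.client.api.connections import get_connection
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
call_result = get_connection.sync(
    'sample_connection',
    client=dqops_client
)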
Execution
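A sketch of the unauthenticated asynchronous call, mirroring the authenticated example below without the token:
from dqops import client
from dqops.client.api.connections import get_connection
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
call_result = await get_connection.asyncio(
    'sample_connection',
    client=dqops_client
)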
Execution
from dqops import client
from dqops.client.api.connections import get_connection
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection.sync(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection.asyncio(
'sample_connection',
client=dqops_client
)
get_connection_auto_import
Return the configuration of the table auto import for a connection
Follow the link to see the source code on GitHub.
GET
Return value
Property name | Description | Data type |
---|---|---|
auto_import_tables_spec | | AutoImportTablesSpec |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
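Reconstructed sketch of the collapsed curl example; the autoimport path segment is an assumption based on the operation name and is not confirmed by this page:
curl http://localhost:8888/api/connections/sample_connection/autoimport^
-H "Accept: application/json"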
Execution
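A sketch of the unauthenticated synchronous call, mirroring the authenticated example below without the token:
from dqops import client
from dqops.client.api.connections import get_connection_auto_import
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
call_result = get_connection_auto_import.sync(
    'sample_connection',
    client=dqops_client
)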
Execution
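A sketch of the unauthenticated asynchronous call, mirroring the authenticated example below without the token:
from dqops import client
from dqops.client.api.connections import get_connection_auto_import
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
call_result = await get_connection_auto_import.asyncio(
    'sample_connection',
    client=dqops_client
)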
Execution
from dqops import client
from dqops.client.api.connections import get_connection_auto_import
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_auto_import.sync(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_auto_import
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_auto_import.asyncio(
'sample_connection',
client=dqops_client
)
get_connection_basic
Return the basic details of a connection given the connection name
Follow the link to see the source code on GitHub.
GET http://localhost:8888/api/connections/{connectionName}/basic
Return value
Property name | Description | Data type |
---|---|---|
connection_model | | ConnectionModel |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
The returned result:
{
"connection_name" : "sample_connection",
"parallel_jobs_limit" : 4,
"provider_type" : "postgresql",
"postgresql" : {
"host" : "localhost",
"port" : "5432",
"database" : "db",
"user" : "PASSWD",
"sslmode" : "disable"
},
"run_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true
},
"run_profiling_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "profiling"
},
"run_monitoring_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "monitoring"
},
"run_partition_checks_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"checkType" : "partitioned"
},
"collect_statistics_job_template" : {
"connection" : "sample_connection",
"enabled" : true,
"columnNames" : [ ]
},
"data_clean_job_template" : {
"connection" : "sample_connection",
"deleteErrors" : true,
"deleteStatistics" : true,
"deleteCheckResults" : true,
"deleteSensorReadouts" : true,
"deleteErrorSamples" : true,
"deleteIncidents" : true,
"deleteChecksConfiguration" : false
},
"advanced_properties" : { },
"can_edit" : false,
"can_collect_statistics" : true,
"can_run_checks" : true,
"can_delete_data" : true
}
Execution
from dqops import client
from dqops.client.api.connections import get_connection_basic
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = get_connection_basic.sync(
'sample_connection',
client=dqops_client
)
The returned result:
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_basic
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = await get_connection_basic.asyncio(
'sample_connection',
client=dqops_client
)
The returned result:
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_basic
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_basic.sync(
'sample_connection',
client=dqops_client
)
The returned result:
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_basic
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_basic.asyncio(
'sample_connection',
client=dqops_client
)
The returned result:
ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
get_connection_comments
Return the comments for a connection
Follow the link to see the source code on GitHub.
GET http://localhost:8888/api/connections/{connectionName}/comments
Return value
Property name | Description | Data type |
---|---|---|
comment_spec | | List[CommentSpec] |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
curl http://localhost:8888/api/connections/sample_connection/comments^
-H "Accept: application/json"
The returned result:
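The JSON body was collapsed on this page; the following is a sketch reconstructed from the CommentSpec values shown in the Python examples below (the exact field serialization is an assumption):
[ {
"date" : "2007-12-03T10:15:30",
"comment_by" : "sample_user",
"comment" : "Sample comment"
}, {
"date" : "2007-12-03T10:15:30",
"comment_by" : "sample_user",
"comment" : "Sample comment"
}, {
"date" : "2007-12-03T10:15:30",
"comment_by" : "sample_user",
"comment" : "Sample comment"
} ]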
Execution
from dqops import client
from dqops.client.api.connections import get_connection_comments
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = get_connection_comments.sync(
'sample_connection',
client=dqops_client
)
The returned result:
[
CommentSpec(
date=Some date/time value: [2007-12-03T10:15:30],
comment_by='sample_user',
comment='Sample comment'
),
CommentSpec(
date=Some date/time value: [2007-12-03T10:15:30],
comment_by='sample_user',
comment='Sample comment'
),
CommentSpec(
date=Some date/time value: [2007-12-03T10:15:30],
comment_by='sample_user',
comment='Sample comment'
)
]
Execution
from dqops import client
from dqops.client.api.connections import get_connection_comments
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = await get_connection_comments.asyncio(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
[
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
Execution
from dqops import client
from dqops.client.api.connections import get_connection_comments
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_comments.sync(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
[
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
Execution
from dqops import client
from dqops.client.api.connections import get_connection_comments
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_comments.asyncio(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
[
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
get_connection_common_columns
Finds common column names that are used on one or more tables. The list of columns is sorted in descending order by column name.
Follow the link to see the source code on GitHub.
GET
Return value
Property name | Description | Data type |
---|---|---|
common_column_model | | List[CommonColumnModel] |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
from dqops import client
from dqops.client.api.connections import get_connection_common_columns
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_common_columns.sync(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_common_columns
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_common_columns.asyncio(
'sample_connection',
client=dqops_client
)
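The returned value is a list of CommonColumnModel objects. Below is a minimal sketch of consuming that list; the column_name and tables_count field names are assumptions about the model and are not confirmed on this page.
from dqops import client
from dqops.client.api.connections import get_connection_common_columns
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
common_columns = get_connection_common_columns.sync(
    'sample_connection',
    client=dqops_client
)
# Print each shared column with the number of tables that use it
# (column_name and tables_count are assumed field names).
for common_column in common_columns or []:
    print(common_column.column_name, common_column.tables_count)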
get_connection_default_grouping_configuration
Returns the default data grouping configuration for a connection
Follow the link to see the source code on GitHub.
GET
Return value
Property name | Description | Data type |
---|---|---|
data_grouping_configuration_spec | | DataGroupingConfigurationSpec |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
from dqops import client
from dqops.client.api.connections import get_connection_default_grouping_configuration
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = get_connection_default_grouping_configuration.sync(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_default_grouping_configuration
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = await get_connection_default_grouping_configuration.asyncio(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_default_grouping_configuration
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_default_grouping_configuration.sync(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_default_grouping_configuration
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_default_grouping_configuration.asyncio(
'sample_connection',
client=dqops_client
)
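A minimal sketch of reading the returned DataGroupingConfigurationSpec follows. It assumes the spec exposes up to nine dimension slots named level_1 through level_9 (the update example later on this page sets level_3), each holding an optional DataGroupingDimensionSpec with source and column fields.
from dqops import client
from dqops.client.api.connections import get_connection_default_grouping_configuration
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
grouping_spec = get_connection_default_grouping_configuration.sync(
    'sample_connection',
    client=dqops_client
)
# Walk the assumed level_1..level_9 slots and print configured dimensions.
for level in range(1, 10):
    dimension = getattr(grouping_spec, f'level_{level}', None)
    if dimension is not None:
        print(f'level_{level}: source={dimension.source}, column={dimension.column}')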
get_connection_incident_grouping
Retrieves the configuration of data quality incident grouping and incident notifications
Follow the link to see the source code on GitHub.
GET
Return value
Property name | Description | Data type |
---|---|---|
connection_incident_grouping_spec | | ConnectionIncidentGroupingSpec |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
curl http://localhost:8888/api/connections/sample_connection/incidentgrouping^
-H "Accept: application/json"
Expand to see the returned result
{
"grouping_level" : "table_dimension",
"minimum_severity" : "warning",
"divide_by_data_groups" : true,
"max_incident_length_days" : 60,
"mute_for_days" : 60,
"incident_notification" : {
"incident_opened_addresses" : "https://sample_url.com/opened",
"incident_acknowledged_addresses" : "https://sample_url.com/acknowledged",
"incident_resolved_addresses" : "https://sample_url.com/resolved",
"incident_muted_addresses" : "https://sample_url.com/muted"
}
}
Execution
from dqops import client
from dqops.client.api.connections import get_connection_incident_grouping
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = get_connection_incident_grouping.sync(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_incident_grouping
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = await get_connection_incident_grouping.asyncio(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_incident_grouping
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_incident_grouping.sync(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_incident_grouping
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_incident_grouping.asyncio(
'sample_connection',
client=dqops_client
)
Expand to see the returned result
ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
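Because the update_connection_incident_grouping operation later on this page replaces the whole specification, a practical pattern is read-modify-write: fetch the current grouping, change a field, and store it back. A minimal sketch:
from dqops import client
from dqops.client.api.connections import get_connection_incident_grouping, \
    update_connection_incident_grouping
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
# Fetch the current configuration, shorten the mute period, write it back.
grouping = get_connection_incident_grouping.sync(
    'sample_connection',
    client=dqops_client
)
grouping.mute_for_days = 30
update_connection_incident_grouping.sync(
    'sample_connection',
    client=dqops_client,
    json_body=grouping
)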
get_connection_labels
Returns the labels for a connection
Follow the link to see the source code on GitHub.
GET
Return value
Property name | Description | Data type |
---|---|---|
string | | List[string] |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Usage examples
Execution
from dqops import client
from dqops.client.api.connections import get_connection_labels
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_labels.sync(
'sample_connection',
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_labels
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_labels.asyncio(
'sample_connection',
client=dqops_client
)
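The operation returns a plain list of label strings. A minimal usage sketch:
from dqops import client
from dqops.client.api.connections import get_connection_labels
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
labels = get_connection_labels.sync(
    'sample_connection',
    client=dqops_client
)
# labels is a List[string]; print one label per line
for label in labels or []:
    print(label)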
get_connection_scheduling_group
Returns the schedule assigned to a connection for a given scheduling group
Follow the link to see the source code on GitHub.
GET
Return value
Property name | Description | Data type |
---|---|---|
cron_schedule_spec | | CronScheduleSpec |
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
scheduling_group | Check scheduling group (named schedule) | CheckRunScheduleGroup | |
Usage examples
Execution
from dqops import client
from dqops.client.api.connections import get_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = get_connection_scheduling_group.sync(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup
dqops_client = client.Client(
'http://localhost:8888/',
raise_on_unexpected_status=True
)
call_result = await get_connection_scheduling_group.asyncio(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = get_connection_scheduling_group.sync(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client
)
Execution
from dqops import client
from dqops.client.api.connections import get_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token,
raise_on_unexpected_status=True
)
call_result = await get_connection_scheduling_group.asyncio(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client
)
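The returned CronScheduleSpec carries a cron expression and a disabled flag, as shown by the update_connection_scheduling_group examples later on this page. A minimal sketch of inspecting it:
from dqops import client
from dqops.client.api.connections import get_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup
dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)
schedule = get_connection_scheduling_group.sync(
    'sample_connection',
    CheckRunScheduleGroup.partitioned_daily,
    client=dqops_client
)
# Report the effective schedule for the scheduling group
if schedule is not None and not schedule.disabled:
    print(schedule.cron_expression)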
update_connection
Updates an existing connection
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Request body
Description | Data type | Required |
---|---|---|
Connection specification | ConnectionSpec | |
Usage examples
Execution
curl -X PUT http://localhost:8888/api/connections/sample_connection^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"provider_type\":\"postgresql\",\"postgresql\":{\"host\":\"localhost\",\"port\":\"5432\",\"database\":\"db\",\"user\":\"PASSWD\",\"sslmode\":\"disable\"},\"parallel_jobs_limit\":4,\"incident_grouping\":{\"grouping_level\":\"table_dimension_category\",\"minimum_severity\":\"warning\",\"max_incident_length_days\":60,\"mute_for_days\":60}}"
Execution
from dqops import client
from dqops.client.api.connections import update_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = update_connection.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = await update_connection.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = update_connection.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection
from dqops.client.models import ConnectionIncidentGroupingSpec, \
ConnectionSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionSpec(
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
parallel_jobs_limit=4,
incident_grouping=ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION_CATEGORY,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=False,
max_incident_length_days=60,
mute_for_days=60,
disabled=False
),
advanced_properties={
}
)
call_result = await update_connection.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
update_connection_auto_import
Updates the configuration of the table auto import on a connection. The auto import specifies the table filters and a CRON schedule.
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name | Connection name | string | |
Request body
Description | Data type | Required |
---|---|---|
Auto import settings to store | AutoImportTablesSpec | |
Usage examples
Execution
from dqops import client
from dqops.client.api.connections import update_connection_auto_import
from dqops.client.models import AutoImportTablesSpec
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = AutoImportTablesSpec()
call_result = update_connection_auto_import.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_auto_import
from dqops.client.models import AutoImportTablesSpec
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = AutoImportTablesSpec()
call_result = await update_connection_auto_import.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_auto_import
from dqops.client.models import AutoImportTablesSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = AutoImportTablesSpec()
call_result = update_connection_auto_import.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_auto_import
from dqops.client.models import AutoImportTablesSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = AutoImportTablesSpec()
call_result = await update_connection_auto_import.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
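The examples above store an empty AutoImportTablesSpec, which clears the auto import settings. The sketch below stores a populated specification instead; the schema_filter, table_name_contains, and schedule field names are assumptions based on the operation description (table filters plus a CRON schedule) and should be verified against the AutoImportTablesSpec model.
from dqops import client
from dqops.client.api.connections import update_connection_auto_import
from dqops.client.models import AutoImportTablesSpec, CronScheduleSpec
dqops_client = client.Client(
    'http://localhost:8888/'
)
# Hypothetical field names - verify against the AutoImportTablesSpec model
request_body = AutoImportTablesSpec(
    schema_filter='public',
    table_name_contains='fact',
    schedule=CronScheduleSpec(
        cron_expression='0 12 1 * *',
        disabled=False
    )
)
call_result = update_connection_auto_import.sync(
    'sample_connection',
    client=dqops_client,
    json_body=request_body
)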
update_connection_basic
Updates the basic information of a connection
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name |
Connection name | string |
Request body
Description | Data type | Required |
---|---|---|
Connection basic details | ConnectionModel |
Usage examples
Execution
curl -X PUT http://localhost:8888/api/connections/sample_connection/basic^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"connection_name\":\"sample_connection\",\"parallel_jobs_limit\":4,\"provider_type\":\"postgresql\",\"postgresql\":{\"host\":\"localhost\",\"port\":\"5432\",\"database\":\"db\",\"user\":\"PASSWD\",\"sslmode\":\"disable\"},\"run_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true},\"run_profiling_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"profiling\"},\"run_monitoring_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"monitoring\"},\"run_partition_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"partitioned\"},\"collect_statistics_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"columnNames\":[]},\"data_clean_job_template\":{\"connection\":\"sample_connection\",\"deleteErrors\":true,\"deleteStatistics\":true,\"deleteCheckResults\":true,\"deleteSensorReadouts\":true,\"deleteErrorSamples\":true,\"deleteIncidents\":true,\"deleteChecksConfiguration\":false},\"advanced_properties\":{},\"can_edit\":false,\"can_collect_statistics\":true,\"can_run_checks\":true,\"can_delete_data\":true}"
Execution
from dqops import client
from dqops.client.api.connections import update_connection_basic
from dqops.client.models import CheckSearchFilters, \
    CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = update_connection_basic.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_basic
from dqops.client.models import CheckSearchFilters, \
    CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = await update_connection_basic.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_basic
from dqops.client.models import CheckSearchFilters, \
    CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = update_connection_basic.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_basic
from dqops.client.models import CheckSearchFilters, \
    CheckType, \
ConnectionModel, \
DeleteStoredDataQueueJobParameters, \
PostgresqlParametersSpec, \
PostgresqlSslMode, \
ProviderType, \
StatisticsCollectorSearchFilters
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionModel(
connection_name='sample_connection',
parallel_jobs_limit=4,
provider_type=ProviderType.POSTGRESQL,
postgresql=PostgresqlParametersSpec(
host='localhost',
port='5432',
database='db',
user='PASSWD',
sslmode=PostgresqlSslMode.DISABLE
),
run_checks_job_template=CheckSearchFilters(
connection='sample_connection',
enabled=True
),
run_profiling_checks_job_template=CheckSearchFilters(
check_type=CheckType.PROFILING,
connection='sample_connection',
enabled=True
),
run_monitoring_checks_job_template=CheckSearchFilters(
check_type=CheckType.MONITORING,
connection='sample_connection',
enabled=True
),
run_partition_checks_job_template=CheckSearchFilters(
check_type=CheckType.PARTITIONED,
connection='sample_connection',
enabled=True
),
collect_statistics_job_template=StatisticsCollectorSearchFilters(
column_names=[
],
connection='sample_connection',
enabled=True
),
data_clean_job_template=DeleteStoredDataQueueJobParameters(
connection='sample_connection',
delete_errors=True,
delete_statistics=True,
delete_check_results=True,
delete_sensor_readouts=True,
delete_error_samples=True,
delete_incidents=True,
delete_checks_configuration=False
),
advanced_properties={
},
can_edit=False,
can_collect_statistics=True,
can_run_checks=True,
can_delete_data=True
)
call_result = await update_connection_basic.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
update_connection_comments
Updates (replaces) the list of comments of a connection
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name |
Connection name | string |
Request body
Description | Data type | Required |
---|---|---|
List of comments | List[CommentSpec] |
Usage examples
Execution
curl -X PUT http://localhost:8888/api/connections/sample_connection/comments^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"[{\"date\":\"2007-12-03T10:15:30\",\"comment_by\":\"sample_user\",\"comment\":\"Sample comment\"},{\"date\":\"2007-12-03T10:15:30\",\"comment_by\":\"sample_user\",\"comment\":\"Sample comment\"},{\"date\":\"2007-12-03T10:15:30\",\"comment_by\":\"sample_user\",\"comment\":\"Sample comment\"}]"
Execution
import datetime
from dqops import client
from dqops.client.api.connections import update_connection_comments
from dqops.client.models import CommentSpec
dqops_client = client.Client(
    'http://localhost:8888/'
)
# CommentSpec.date is a datetime.datetime, serialized as ISO-8601 in JSON
request_body = [
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
call_result = update_connection_comments.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
import datetime
from dqops import client
from dqops.client.api.connections import update_connection_comments
from dqops.client.models import CommentSpec
dqops_client = client.Client(
    'http://localhost:8888/'
)
request_body = [
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
call_result = await update_connection_comments.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
import datetime
from dqops import client
from dqops.client.api.connections import update_connection_comments
from dqops.client.models import CommentSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token
)
request_body = [
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
call_result = update_connection_comments.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
import datetime
from dqops import client
from dqops.client.api.connections import update_connection_comments
from dqops.client.models import CommentSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token
)
request_body = [
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    ),
    CommentSpec(
        date=datetime.datetime(2007, 12, 3, 10, 15, 30),
        comment_by='sample_user',
        comment='Sample comment'
    )
]
call_result = await update_connection_comments.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
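Because this call replaces the entire comment list, a typical pattern is to first read the current comments with get_connection_comments, append the new entry, and send the merged list back, so that existing comments are not lost.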
update_connection_default_grouping_configuration
Updates the default data grouping configuration of a connection
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name |
Connection name | string |
Request body
Description | Data type | Required |
---|---|---|
Default data grouping configuration to be assigned to a connection | DataGroupingConfigurationSpec |
Usage examples
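The examples below assign a single grouping dimension at level 3, populated from the values of sample_column.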
Execution
from dqops import client
from dqops.client.api.connections import update_connection_default_grouping_configuration
from dqops.client.models import DataGroupingConfigurationSpec, \
DataGroupingDimensionSource, \
DataGroupingDimensionSpec
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = DataGroupingConfigurationSpec(
level_3=DataGroupingDimensionSpec(
source=DataGroupingDimensionSource.COLUMN_VALUE,
column='sample_column'
)
)
call_result = update_connection_default_grouping_configuration.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_default_grouping_configuration
from dqops.client.models import DataGroupingConfigurationSpec, \
DataGroupingDimensionSource, \
DataGroupingDimensionSpec
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = DataGroupingConfigurationSpec(
level_3=DataGroupingDimensionSpec(
source=DataGroupingDimensionSource.COLUMN_VALUE,
column='sample_column'
)
)
call_result = await update_connection_default_grouping_configuration.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_default_grouping_configuration
from dqops.client.models import DataGroupingConfigurationSpec, \
DataGroupingDimensionSource, \
DataGroupingDimensionSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = DataGroupingConfigurationSpec(
level_3=DataGroupingDimensionSpec(
source=DataGroupingDimensionSource.COLUMN_VALUE,
column='sample_column'
)
)
call_result = update_connection_default_grouping_configuration.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_default_grouping_configuration
from dqops.client.models import DataGroupingConfigurationSpec, \
DataGroupingDimensionSource, \
DataGroupingDimensionSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = DataGroupingConfigurationSpec(
level_3=DataGroupingDimensionSpec(
source=DataGroupingDimensionSource.COLUMN_VALUE,
column='sample_column'
)
)
call_result = await update_connection_default_grouping_configuration.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
update_connection_incident_grouping
Updates (replaces) the configuration of incident grouping and notifications at the connection (data source) level.
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name |
Connection name | string |
Request body
Description | Data type | Required |
---|---|---|
Incident grouping and notification configuration | ConnectionIncidentGroupingSpec |
Usage examples
Execution
curl -X PUT http://localhost:8888/api/connections/sample_connection/incidentgrouping^
-H "Accept: application/json"^
-H "Content-Type: application/json"^
-d^
"{\"grouping_level\":\"table_dimension\",\"minimum_severity\":\"warning\",\"divide_by_data_groups\":true,\"max_incident_length_days\":60,\"mute_for_days\":60,\"incident_notification\":{\"incident_opened_addresses\":\"https://sample_url.com/opened\",\"incident_acknowledged_addresses\":\"https://sample_url.com/acknowledged\",\"incident_resolved_addresses\":\"https://sample_url.com/resolved\",\"incident_muted_addresses\":\"https://sample_url.com/muted\"}}"
Execution
from dqops import client
from dqops.client.api.connections import update_connection_incident_grouping
from dqops.client.models import ConnectionIncidentGroupingSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
call_result = update_connection_incident_grouping.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_incident_grouping
from dqops.client.models import ConnectionIncidentGroupingSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
call_result = await update_connection_incident_grouping.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_incident_grouping
from dqops.client.models import ConnectionIncidentGroupingSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
call_result = update_connection_incident_grouping.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_incident_grouping
from dqops.client.models import ConnectionIncidentGroupingSpec, \
FilteredNotificationSpecMap, \
IncidentGroupingLevel, \
IncidentNotificationSpec, \
MinimumGroupingSeverityLevel
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = ConnectionIncidentGroupingSpec(
grouping_level=IncidentGroupingLevel.TABLE_DIMENSION,
minimum_severity=MinimumGroupingSeverityLevel.WARNING,
divide_by_data_groups=True,
max_incident_length_days=60,
mute_for_days=60,
disabled=False,
incident_notification=IncidentNotificationSpec(
incident_opened_addresses='https://sample_url.com/opened',
incident_acknowledged_addresses='https://sample_url.com/acknowledged',
incident_resolved_addresses='https://sample_url.com/resolved',
incident_muted_addresses='https://sample_url.com/muted',
filtered_notifications=FilteredNotificationSpecMap()
)
)
call_result = await update_connection_incident_grouping.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
update_connection_labels
Updates the list of labels of a connection
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name |
Connection name | string |
Request body
Description | Data type | Required |
---|---|---|
List of labels | List[string] |
Usage examples
Execution
from dqops import client
from dqops.client.api.connections import update_connection_labels
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = [
'sampleString_1',
'sampleString_2',
'sampleString_3'
]
call_result = update_connection_labels.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_labels
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = [
'sampleString_1',
'sampleString_2',
'sampleString_3'
]
call_result = await update_connection_labels.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_labels
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = [
'sampleString_1',
'sampleString_2',
'sampleString_3'
]
call_result = update_connection_labels.sync(
'sample_connection',
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_labels
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = [
'sampleString_1',
'sampleString_2',
'sampleString_3'
]
call_result = await update_connection_labels.asyncio(
'sample_connection',
client=dqops_client,
json_body=request_body
)
update_connection_scheduling_group
Updates the schedule of a connection for a scheduling group (named schedule for checks with a similar time series configuration)
Follow the link to see the source code on GitHub.
PUT
Parameters of this method are described below
Property name | Description | Data type | Required |
---|---|---|---|
connection_name |
Connection name | string | |
scheduling_group |
Check scheduling group (named schedule) | CheckRunScheduleGroup |
Request body
Description | Data type | Required |
---|---|---|
Monitoring schedule definition to store | CronScheduleSpec |
Usage examples
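In the examples below, the cron expression 0 12 1 * * triggers the schedule at 12:00 on the first day of every month.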
Execution
from dqops import client
from dqops.client.api.connections import update_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup, \
CronScheduleSpec
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = CronScheduleSpec(
cron_expression='0 12 1 * *',
disabled=False
)
call_result = update_connection_scheduling_group.sync(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup, \
CronScheduleSpec
dqops_client = client.Client(
'http://localhost:8888/'
)
request_body = CronScheduleSpec(
cron_expression='0 12 1 * *',
disabled=False
)
call_result = await update_connection_scheduling_group.asyncio(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup, \
CronScheduleSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = CronScheduleSpec(
cron_expression='0 12 1 * *',
disabled=False
)
call_result = update_connection_scheduling_group.sync(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client,
json_body=request_body
)
Execution
from dqops import client
from dqops.client.api.connections import update_connection_scheduling_group
from dqops.client.models import CheckRunScheduleGroup, \
CronScheduleSpec
token = 's4mp13_4u7h_70k3n'
dqops_client = client.AuthenticatedClient(
'http://localhost:8888/',
token=token
)
request_body = CronScheduleSpec(
cron_expression='0 12 1 * *',
disabled=False
)
call_result = await update_connection_scheduling_group.asyncio(
'sample_connection',
CheckRunScheduleGroup.partitioned_daily,
client=dqops_client,
json_body=request_body
)