Last updated: July 22, 2025

DQOps REST API data_sources operations

REST API controller that operates on data sources that have not yet been imported, testing connections and retrieving metadata (schemas and tables).


get_remote_data_source_schemas

Introspects a list of schemas inside a remote data source, identified by an already imported connection.

The source code is available on GitHub.

GET

http://localhost:8888/api/datasource/connections/{connectionName}/schemas

Return value

| Property name | Description | Data type |
|---------------|-------------|-----------|
| schema_remote_model | | List[SchemaRemoteModel] |

Parameters of this method are described below

| Property name | Description | Data type | Required |
|---------------|-------------|-----------|----------|
| connection_name | Connection name | string | Yes |

Usage examples

Execution (curl)

curl http://localhost:8888/api/datasource/connections/sample_connection/schemas^
    -H "Accept: application/json"
The returned result:
[ {
  "alreadyImported" : false
}, {
  "alreadyImported" : false
}, {
  "alreadyImported" : false
} ]

Execution (Python sync client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_schemas

dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)

call_result = get_remote_data_source_schemas.sync(
    'sample_connection',
    client=dqops_client
)
The returned result:
[
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False)
]

Execution (Python async client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_schemas

dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)

call_result = await get_remote_data_source_schemas.asyncio(
    'sample_connection',
    client=dqops_client
)
The returned result:
[
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False)
]

Execution (Python auth sync client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_schemas

token = 's4mp13_4u7h_70k3n'

dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token,
    raise_on_unexpected_status=True
)

call_result = get_remote_data_source_schemas.sync(
    'sample_connection',
    client=dqops_client
)
The returned result:
[
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False)
]

Execution (Python auth async client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_schemas

token = 's4mp13_4u7h_70k3n'

dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token,
    raise_on_unexpected_status=True
)

call_result = await get_remote_data_source_schemas.asyncio(
    'sample_connection',
    client=dqops_client
)
The returned result:
[
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False),
    SchemaRemoteModel(already_imported=False)
]
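
The returned list can be used to pick out the schemas that still need to be imported into DQOps. Below is a minimal sketch of filtering the response; it assumes that SchemaRemoteModel also exposes a schema_name field, which the truncated sample results above do not show.

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_schemas

dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)

schemas = get_remote_data_source_schemas.sync(
    'sample_connection',
    client=dqops_client
)

# Keep only the schemas that have not been imported into DQOps yet.
not_imported = [s for s in schemas if not s.already_imported]

# schema_name is an assumed field; the sample responses above only
# show the already_imported flag.
for schema in not_imported:
    print(schema.schema_name)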

get_remote_data_source_tables

Introspects the list of tables inside a schema on a remote data source that is identified by a connection that was added to DQOps.

The source code is available on GitHub.

GET

http://localhost:8888/api/datasource/connections/{connectionName}/schemas/{schemaName}/tables

Return value

| Property name | Description | Data type |
|---------------|-------------|-----------|
| remote_table_list_model | | List[RemoteTableListModel] |

Parameters of this method are described below

| Property name | Description | Data type | Required |
|---------------|-------------|-----------|----------|
| connection_name | Connection name | string | Yes |
| schema_name | Schema name | string | Yes |
| table_name_contains | Optional filter to return tables that contain this text inside the table name (case sensitive) | string | No |

Usage examples

Execution (curl)

curl http://localhost:8888/api/datasource/connections/sample_connection/schemas/sample_schema/tables^
    -H "Accept: application/json"
The returned result:
[ {
  "alreadyImported" : false
}, {
  "alreadyImported" : false
}, {
  "alreadyImported" : false
} ]

Execution (Python sync client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_tables

dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)

call_result = get_remote_data_source_tables.sync(
    'sample_connection',
    'sample_schema',
    client=dqops_client
)
The returned result:
[
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False)
]

Execution (Python async client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_tables

dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)

call_result = await get_remote_data_source_tables.asyncio(
    'sample_connection',
    'sample_schema',
    client=dqops_client
)
The returned result:
[
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False)
]

Execution (Python auth sync client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_tables

token = 's4mp13_4u7h_70k3n'

dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token,
    raise_on_unexpected_status=True
)

call_result = get_remote_data_source_tables.sync(
    'sample_connection',
    'sample_schema',
    client=dqops_client
)
The returned result:
[
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False)
]

Execution (Python auth async client)

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_tables

token = 's4mp13_4u7h_70k3n'

dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token,
    raise_on_unexpected_status=True
)

call_result = await get_remote_data_source_tables.asyncio(
    'sample_connection',
    'sample_schema',
    client=dqops_client
)
The returned result:
[
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False),
    RemoteTableListModel(already_imported=False)
]
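
The optional table_name_contains filter narrows the introspected table list. A minimal sketch follows, assuming the generated Python client maps the query parameter to a keyword argument of the same name:

from dqops import client
from dqops.client.api.data_sources import get_remote_data_source_tables

dqops_client = client.Client(
    'http://localhost:8888/',
    raise_on_unexpected_status=True
)

# Return only the tables whose names contain 'fact' (case sensitive).
# table_name_contains is assumed to be the keyword argument generated
# for the optional query parameter described above.
call_result = get_remote_data_source_tables.sync(
    'sample_connection',
    'sample_schema',
    client=dqops_client,
    table_name_contains='fact'
)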

test_connection

Checks if the given remote connection can be opened and if the credentials are valid.

The source code is available on GitHub.

POST

http://localhost:8888/api/datasource/testconnection

Return value

| Property name | Description | Data type |
|---------------|-------------|-----------|
| connection_test_model | | ConnectionTestModel |

Parameters of this method are described below

| Property name | Description | Data type | Required |
|---------------|-------------|-----------|----------|
| verify_name_uniqueness | Verify if the connection name is unique; the default value is true | boolean | No |

Request body

| Description | Data type | Required |
|-------------|-----------|----------|
| Basic connection model | ConnectionModel | Yes |

Usage examples

Execution (curl)

curl -X POST http://localhost:8888/api/datasource/testconnection^
    -H "Accept: application/json"^
    -H "Content-Type: application/json"^
    -d^
    "{\"connection_name\":\"sample_connection\",\"parallel_jobs_limit\":4,\"provider_type\":\"postgresql\",\"postgresql\":{\"host\":\"localhost\",\"port\":\"5432\",\"database\":\"db\",\"user\":\"PASSWD\",\"sslmode\":\"disable\"},\"run_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true},\"run_profiling_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"profiling\"},\"run_monitoring_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"monitoring\"},\"run_partition_checks_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"checkType\":\"partitioned\"},\"collect_statistics_job_template\":{\"connection\":\"sample_connection\",\"enabled\":true,\"columnNames\":[]},\"data_clean_job_template\":{\"connection\":\"sample_connection\",\"deleteErrors\":true,\"deleteStatistics\":true,\"deleteCheckResults\":true,\"deleteSensorReadouts\":true,\"deleteErrorSamples\":true,\"deleteIncidents\":true,\"deleteChecksConfiguration\":false},\"advanced_properties\":{},\"can_edit\":false,\"can_collect_statistics\":true,\"can_run_checks\":true,\"can_delete_data\":true}"
The returned result:
{ }

Execution (Python sync client)

from dqops import client
from dqops.client.api.data_sources import test_connection
from dqops.client.models import CheckSearchFilters, \
                                CheckType, \
                                ConnectionModel, \
                                DeleteStoredDataQueueJobParameters, \
                                PostgresqlParametersSpec, \
                                PostgresqlSslMode, \
                                ProviderType, \
                                StatisticsCollectorSearchFilters

dqops_client = client.Client(
    'http://localhost:8888/'
)

request_body = ConnectionModel(
    connection_name='sample_connection',
    parallel_jobs_limit=4,
    provider_type=ProviderType.POSTGRESQL,
    postgresql=PostgresqlParametersSpec(
        host='localhost',
        port='5432',
        database='db',
        user='PASSWD',
        sslmode=PostgresqlSslMode.DISABLE
    ),
    run_checks_job_template=CheckSearchFilters(
        connection='sample_connection',
        enabled=True
    ),
    run_profiling_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PROFILING,
        connection='sample_connection',
        enabled=True
    ),
    run_monitoring_checks_job_template=CheckSearchFilters(
        check_type=CheckType.MONITORING,
        connection='sample_connection',
        enabled=True
    ),
    run_partition_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PARTITIONED,
        connection='sample_connection',
        enabled=True
    ),
    collect_statistics_job_template=StatisticsCollectorSearchFilters(
        column_names=[],
        connection='sample_connection',
        enabled=True
    ),
    data_clean_job_template=DeleteStoredDataQueueJobParameters(
        connection='sample_connection',
        delete_errors=True,
        delete_statistics=True,
        delete_check_results=True,
        delete_sensor_readouts=True,
        delete_error_samples=True,
        delete_incidents=True,
        delete_checks_configuration=False
    ),
    advanced_properties={},
    can_edit=False,
    can_collect_statistics=True,
    can_run_checks=True,
    can_delete_data=True
)

call_result = test_connection.sync(
    client=dqops_client,
    json_body=request_body
)
The returned result:
ConnectionTestModel()

Execution (Python async client)

from dqops import client
from dqops.client.api.data_sources import test_connection
from dqops.client.models import CheckSearchFilters, \
                                CheckType, \
                                ConnectionModel, \
                                DeleteStoredDataQueueJobParameters, \
                                PostgresqlParametersSpec, \
                                PostgresqlSslMode, \
                                ProviderType, \
                                StatisticsCollectorSearchFilters

dqops_client = client.Client(
    'http://localhost:8888/'
)

request_body = ConnectionModel(
    connection_name='sample_connection',
    parallel_jobs_limit=4,
    provider_type=ProviderType.POSTGRESQL,
    postgresql=PostgresqlParametersSpec(
        host='localhost',
        port='5432',
        database='db',
        user='PASSWD',
        sslmode=PostgresqlSslMode.DISABLE
    ),
    run_checks_job_template=CheckSearchFilters(
        connection='sample_connection',
        enabled=True
    ),
    run_profiling_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PROFILING,
        connection='sample_connection',
        enabled=True
    ),
    run_monitoring_checks_job_template=CheckSearchFilters(
        check_type=CheckType.MONITORING,
        connection='sample_connection',
        enabled=True
    ),
    run_partition_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PARTITIONED,
        connection='sample_connection',
        enabled=True
    ),
    collect_statistics_job_template=StatisticsCollectorSearchFilters(
        column_names=[],
        connection='sample_connection',
        enabled=True
    ),
    data_clean_job_template=DeleteStoredDataQueueJobParameters(
        connection='sample_connection',
        delete_errors=True,
        delete_statistics=True,
        delete_check_results=True,
        delete_sensor_readouts=True,
        delete_error_samples=True,
        delete_incidents=True,
        delete_checks_configuration=False
    ),
    advanced_properties={},
    can_edit=False,
    can_collect_statistics=True,
    can_run_checks=True,
    can_delete_data=True
)

call_result = await test_connection.asyncio(
    client=dqops_client,
    json_body=request_body
)
The returned result:
ConnectionTestModel()

Execution (Python auth sync client)

from dqops import client
from dqops.client.api.data_sources import test_connection
from dqops.client.models import CheckSearchFilters, \
                                CheckType, \
                                ConnectionModel, \
                                DeleteStoredDataQueueJobParameters, \
                                PostgresqlParametersSpec, \
                                PostgresqlSslMode, \
                                ProviderType, \
                                StatisticsCollectorSearchFilters

token = 's4mp13_4u7h_70k3n'

dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token
)

request_body = ConnectionModel(
    connection_name='sample_connection',
    parallel_jobs_limit=4,
    provider_type=ProviderType.POSTGRESQL,
    postgresql=PostgresqlParametersSpec(
        host='localhost',
        port='5432',
        database='db',
        user='PASSWD',
        sslmode=PostgresqlSslMode.DISABLE
    ),
    run_checks_job_template=CheckSearchFilters(
        connection='sample_connection',
        enabled=True
    ),
    run_profiling_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PROFILING,
        connection='sample_connection',
        enabled=True
    ),
    run_monitoring_checks_job_template=CheckSearchFilters(
        check_type=CheckType.MONITORING,
        connection='sample_connection',
        enabled=True
    ),
    run_partition_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PARTITIONED,
        connection='sample_connection',
        enabled=True
    ),
    collect_statistics_job_template=StatisticsCollectorSearchFilters(
        column_names=[],
        connection='sample_connection',
        enabled=True
    ),
    data_clean_job_template=DeleteStoredDataQueueJobParameters(
        connection='sample_connection',
        delete_errors=True,
        delete_statistics=True,
        delete_check_results=True,
        delete_sensor_readouts=True,
        delete_error_samples=True,
        delete_incidents=True,
        delete_checks_configuration=False
    ),
    advanced_properties={},
    can_edit=False,
    can_collect_statistics=True,
    can_run_checks=True,
    can_delete_data=True
)

call_result = test_connection.sync(
    client=dqops_client,
    json_body=request_body
)
The returned result:
ConnectionTestModel()

Execution (Python auth async client)

from dqops import client
from dqops.client.api.data_sources import test_connection
from dqops.client.models import CheckSearchFilters, \
                                CheckType, \
                                ConnectionModel, \
                                DeleteStoredDataQueueJobParameters, \
                                PostgresqlParametersSpec, \
                                PostgresqlSslMode, \
                                ProviderType, \
                                StatisticsCollectorSearchFilters

token = 's4mp13_4u7h_70k3n'

dqops_client = client.AuthenticatedClient(
    'http://localhost:8888/',
    token=token
)

request_body = ConnectionModel(
    connection_name='sample_connection',
    parallel_jobs_limit=4,
    provider_type=ProviderType.POSTGRESQL,
    postgresql=PostgresqlParametersSpec(
        host='localhost',
        port='5432',
        database='db',
        user='PASSWD',
        sslmode=PostgresqlSslMode.DISABLE
    ),
    run_checks_job_template=CheckSearchFilters(
        connection='sample_connection',
        enabled=True
    ),
    run_profiling_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PROFILING,
        connection='sample_connection',
        enabled=True
    ),
    run_monitoring_checks_job_template=CheckSearchFilters(
        check_type=CheckType.MONITORING,
        connection='sample_connection',
        enabled=True
    ),
    run_partition_checks_job_template=CheckSearchFilters(
        check_type=CheckType.PARTITIONED,
        connection='sample_connection',
        enabled=True
    ),
    collect_statistics_job_template=StatisticsCollectorSearchFilters(
        column_names=[],
        connection='sample_connection',
        enabled=True
    ),
    data_clean_job_template=DeleteStoredDataQueueJobParameters(
        connection='sample_connection',
        delete_errors=True,
        delete_statistics=True,
        delete_check_results=True,
        delete_sensor_readouts=True,
        delete_error_samples=True,
        delete_incidents=True,
        delete_checks_configuration=False
    ),
    advanced_properties={},
    can_edit=False,
    can_collect_statistics=True,
    can_run_checks=True,
    can_delete_data=True
)

call_result = await test_connection.asyncio(
    client=dqops_client,
    json_body=request_body
)
The returned result:
ConnectionTestModel()
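
The empty ConnectionTestModel shown above does not reveal the model's fields. The sketch below checks the test outcome; the connection_test_result and error_message field names are assumptions derived from the model name, the trimmed-down ConnectionModel (connectivity fields only, no job templates) is also an assumption, and verify_name_uniqueness is assumed to map to a keyword argument of the same name.

from dqops import client
from dqops.client.api.data_sources import test_connection
from dqops.client.models import ConnectionModel, \
                                PostgresqlParametersSpec, \
                                PostgresqlSslMode, \
                                ProviderType

dqops_client = client.Client(
    'http://localhost:8888/'
)

# A trimmed-down ConnectionModel with only the fields needed to open
# the connection; assumes the job template fields are optional.
request_body = ConnectionModel(
    connection_name='sample_connection',
    provider_type=ProviderType.POSTGRESQL,
    postgresql=PostgresqlParametersSpec(
        host='localhost',
        port='5432',
        database='db',
        user='PASSWD',
        sslmode=PostgresqlSslMode.DISABLE
    )
)

call_result = test_connection.sync(
    client=dqops_client,
    json_body=request_body,
    verify_name_uniqueness=False  # assumed keyword for the query parameter
)

# Field names below are assumptions; the sample output above is empty.
if call_result.connection_test_result == 'SUCCESS':
    print('Connection opened and credentials are valid')
else:
    print('Connection test failed:', call_result.error_message)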