get the planned WrenSQL

Parameters:
  Is Fallback (boolean or null)
  manifestStr (string, required): base64-encoded manifest
  sql (string, required)

Request sample:
  {
    "manifestStr": "string",
    "sql": "string"
  }

Response sample:
  "string"
}"string"get the dialect SQL for the specified data source
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
Is Fallback (boolean) or Is Fallback (null) (Is Fallback) |
| manifestStr required | string (Manifeststr) Base64 manifest |
| sql required | string (Sql) |
{- "manifestStr": "string",
- "sql": "string"
}"string"get the constraints of the specified data source
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
required | Connectioninfo (object) or AthenaConnectionInfo (object) or BigQueryDatasetConnectionInfo (object) or BigQueryProjectConnectionInfo (object) or CannerConnectionInfo (object) or ClickHouseConnectionInfo (object) or ConnectionUrl (object) or MSSqlConnectionInfo (object) or MySqlConnectionInfo (object) or OracleConnectionInfo (object) or PostgresConnectionInfo (object) or RedshiftConnectionInfo (object) or RedshiftIAMConnectionInfo (object) or SnowflakeConnectionInfo (object) or SparkConnectionInfo (object) or DatabricksTokenConnectionInfo (object) or TrinoConnectionInfo (object) or LocalFileConnectionInfo (object) or S3FileConnectionInfo (object) or MinioFileConnectionInfo (object) or GcsFileConnectionInfo (object) (Connectioninfo) |
Any of object (Connectioninfo) | |
{- "connectionInfo": { }
Response sample:
  [
    {
      "constraintColumn": "string",
      "constraintName": "string",
      "constraintTable": "string",
      "constraintType": "PRIMARY KEY",
      "constraintedColumn": "string",
      "constraintedTable": "string"
    }
  ]
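A sketch of consuming the constraint list, for example to collect the relationships between tables. The URL is an assumption; the field names are the ones shown in the response sample above.

  # Sketch only: fetch constraints and print the column pairs they link (URL is assumed).
  import requests

  connection_info = {"host": "localhost", "port": "5432", "user": "postgres",
                     "password": "password", "database": "postgres"}
  resp = requests.post(
      "http://localhost:8000/v2/connector/postgres/metadata/constraints",  # assumed path
      json={"connectionInfo": connection_info},
  )
  resp.raise_for_status()
  for c in resp.json():
      print(c["constraintType"], ":",
            c["constraintTable"] + "." + c["constraintColumn"],
            "->",
            c["constraintedTable"] + "." + c["constraintedColumn"])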
get the table list of the specified data source

Parameters:
  data_source (string, required): one of the DataSource values listed above
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)

Request sample:
  {
    "connectionInfo": { }
  }
Response sample:
  [
    {
      "columns": [
        {
          "description": "string",
          "name": "string",
          "nestedColumns": [ { } ],
          "notNull": true,
          "properties": { },
          "type": "string"
        }
      ],
      "description": "string",
      "name": "string",
      "primaryKey": "string",
      "properties": {
        "catalog": "string",
        "path": "string",
        "schema": "string",
        "table": "string"
      }
    }
  ]
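A sketch that flattens the table-list response into a name-to-columns map. The URL is an assumption; the response shape is the one shown above.

  # Sketch only: build a {table name: [column names]} map (URL is assumed).
  import requests

  connection_info = {"host": "localhost", "port": "5432", "user": "postgres",
                     "password": "password", "database": "postgres"}
  resp = requests.post(
      "http://localhost:8000/v2/connector/postgres/metadata/tables",  # assumed path
      json={"connectionInfo": connection_info},
  )
  resp.raise_for_status()
  tables = {t["name"]: [c["name"] for c in t["columns"]] for t in resp.json()}
  print(tables)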
get the version of the specified data source

Parameters:
  data_source (string, required): one of the DataSource values listed above
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)

Request sample:
  {
    "connectionInfo": { }
  }

Response sample:
  "string"
}"string"get the SQL which table name is substituted
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
Is Fallback (boolean) or Is Fallback (null) (Is Fallback) |
required | Connectioninfo (object) or AthenaConnectionInfo (object) or BigQueryDatasetConnectionInfo (object) or BigQueryProjectConnectionInfo (object) or CannerConnectionInfo (object) or ClickHouseConnectionInfo (object) or ConnectionUrl (object) or MSSqlConnectionInfo (object) or MySqlConnectionInfo (object) or OracleConnectionInfo (object) or PostgresConnectionInfo (object) or RedshiftConnectionInfo (object) or RedshiftIAMConnectionInfo (object) or SnowflakeConnectionInfo (object) or SparkConnectionInfo (object) or DatabricksTokenConnectionInfo (object) or TrinoConnectionInfo (object) or LocalFileConnectionInfo (object) or S3FileConnectionInfo (object) or MinioFileConnectionInfo (object) or GcsFileConnectionInfo (object) (Connectioninfo) |
| manifestStr required | string (Manifeststr) Base64 manifest |
| sql required | string (Sql) |
{- "connectionInfo": { },
- "manifestStr": "string",
- "sql": "string"
}"string"query the specified data source
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
| dryRun | boolean (Dryrun) Default: false enable dryRun mode for validating SQL only |
| cacheEnable | boolean (Cacheenable) Default: false enable query cache mode |
| overrideCache | boolean (Overridecache) Default: false ovrride the exist cache |
Limit (integer) or Limit (null) (Limit) limit the number of rows returned | |
Is Fallback (boolean) or Is Fallback (null) (Is Fallback) |
required | Connectioninfo (object) or AthenaConnectionInfo (object) or BigQueryDatasetConnectionInfo (object) or BigQueryProjectConnectionInfo (object) or CannerConnectionInfo (object) or ClickHouseConnectionInfo (object) or ConnectionUrl (object) or MSSqlConnectionInfo (object) or MySqlConnectionInfo (object) or OracleConnectionInfo (object) or PostgresConnectionInfo (object) or RedshiftConnectionInfo (object) or RedshiftIAMConnectionInfo (object) or SnowflakeConnectionInfo (object) or SparkConnectionInfo (object) or DatabricksTokenConnectionInfo (object) or TrinoConnectionInfo (object) or LocalFileConnectionInfo (object) or S3FileConnectionInfo (object) or MinioFileConnectionInfo (object) or GcsFileConnectionInfo (object) (Connectioninfo) |
| manifestStr required | string (Manifeststr) Base64 manifest |
| sql required | string (Sql) |
{- "connectionInfo": { },
- "manifestStr": "string",
- "sql": "string"
validate the specified rule

Parameters:
  data_source (string, required): one of the DataSource values listed above
  rule_name (string, required)
  Is Fallback (boolean or null)
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)
  manifestStr (string, required): base64-encoded manifest
  parameters (object, required)
{- "connectionInfo": { },
- "manifestStr": "string",
- "parameters": { }
get the planned WrenSQL

Parameters:
  manifestStr (string, required): base64-encoded manifest
  sql (string, required)

Request sample:
  {
    "manifestStr": "string",
    "sql": "string"
  }

Response sample:
  "string"
}"string"get the dialect SQL for the specified data source
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
| manifestStr required | string (Manifeststr) Base64 manifest |
| sql required | string (Sql) |
{- "manifestStr": "string",
- "sql": "string"
}"string"get the available function list of the specified data source
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
| function_name required | string (Function Name) |
get the available function list of the specified data source

Parameters:
  data_source (string, required): one of the DataSource values listed above

Response sample:
  null
get the SQL knowledge of the specified data source

Parameters:
  data_source (string, required): one of the DataSource values listed above

Response sample:
  null
get the schema metadata of the specified data source

Parameters:
  data_source (string, required): one of the DataSource values listed above
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)
  filterInfo (object or null)
  limit (integer or null)

Request sample:
  {
    "connectionInfo": { },
    "filterInfo": { },
    "limit": 0
  }
Response sample:
  [
    {
      "name": "string",
      "schemas": [ "string" ]
    }
  ]
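A sketch of listing the databases and schemas visible to a connection, with filterInfo left null and a cap on the number of entries. The URL is an assumption.

  # Sketch only: list schema metadata (URL is assumed).
  import requests

  resp = requests.post(
      "http://localhost:8000/v3/connector/postgres/metadata/schemas",  # assumed path
      json={
          "connectionInfo": {"host": "localhost", "port": "5432", "user": "postgres",
                             "password": "password", "database": "postgres"},
          "filterInfo": None,  # no filtering
          "limit": 10,         # cap the number of entries returned
      },
  )
  resp.raise_for_status()
  for entry in resp.json():
      print(entry["name"], "->", ", ".join(entry["schemas"]))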
get the table metadata of the specified data source

Parameters:
  data_source (string, required): one of the DataSource values listed above
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)
  filterInfo (object or null)
  limit (integer or null)

Request sample:
  {
    "connectionInfo": { },
    "filterInfo": { },
    "limit": 0
  }

Response sample:
  [
    {
      "columns": [
        {
          "description": "string",
          "name": "string",
          "nestedColumns": [ { } ],
          "notNull": true,
          "properties": { },
          "type": "string"
        }
      ],
      "description": "string",
      "name": "string",
      "primaryKey": "string",
      "properties": {
        "catalog": "string",
        "path": "string",
        "schema": "string",
        "table": "string"
      }
    }
  ]
get the SQL with the table names substituted

Parameters:
  data_source (string, required): one of the DataSource values listed above
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)
  manifestStr (string, required): base64-encoded manifest
  sql (string, required)

Request sample:
  {
    "connectionInfo": { },
    "manifestStr": "string",
    "sql": "string"
  }

Response sample:
  "string"
}"string"query the specified data source
| data_source required | string (DataSource) Enum: "athena" "bigquery" "canner" "clickhouse" "mssql" "mysql" "oracle" "postgres" "redshift" "snowflake" "trino" "local_file" "s3_file" "minio_file" "gcs_file" "spark" "databricks" |
| dryRun | boolean (Dryrun) Default: false enable dryRun mode for validating SQL only |
| cacheEnable | boolean (Cacheenable) Default: false enable query cache mode |
| overrideCache | boolean (Overridecache) Default: false ovrride the exist cache |
Limit (integer) or Limit (null) (Limit) limit the number of rows returned |
required | Connectioninfo (object) or AthenaConnectionInfo (object) or BigQueryDatasetConnectionInfo (object) or BigQueryProjectConnectionInfo (object) or CannerConnectionInfo (object) or ClickHouseConnectionInfo (object) or ConnectionUrl (object) or MSSqlConnectionInfo (object) or MySqlConnectionInfo (object) or OracleConnectionInfo (object) or PostgresConnectionInfo (object) or RedshiftConnectionInfo (object) or RedshiftIAMConnectionInfo (object) or SnowflakeConnectionInfo (object) or SparkConnectionInfo (object) or DatabricksTokenConnectionInfo (object) or TrinoConnectionInfo (object) or LocalFileConnectionInfo (object) or S3FileConnectionInfo (object) or MinioFileConnectionInfo (object) or GcsFileConnectionInfo (object) (Connectioninfo) |
| manifestStr required | string (Manifeststr) Base64 manifest |
| sql required | string (Sql) |
{- "connectionInfo": { },
- "manifestStr": "string",
- "sql": "string"
validate the specified rule

Parameters:
  data_source (string, required): one of the DataSource values listed above
  rule_name (string, required)
  connectionInfo (object, required): connection info for the chosen data source (see the connection info schemas below)
  manifestStr (string, required): base64-encoded manifest
  parameters (object, required)

Request sample:
  {
    "connectionInfo": { },
    "manifestStr": "string",
    "parameters": { }
  }

Response sample:
  null
Parameters:
  manifestStr (string, required): base64-encoded manifest
  sql (string, required)

Request sample:
  {
    "manifestStr": "string",
    "sql": "string"
  }

Response sample:
  [ { } ]
Parameters:
  manifestStr (string, required): base64-encoded manifest
  sqls (array of strings, required)

Request sample:
  {
    "manifestStr": "string",
    "sqls": [ "string" ]
  }
Response sample:
  [
    [ { } ]
  ]
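The two request schemas above take a manifest plus a single SQL statement or a list of statements and return arrays of result objects. A sketch of the batch form, assuming a path such as /v2/analysis/sqls:

  # Sketch only: submit several SQL statements in one call (path is assumed).
  import base64
  import json
  import requests

  manifest_str = base64.b64encode(json.dumps({"catalog": "c", "schema": "s", "models": []}).encode()).decode()
  sqls = ["SELECT 1", "SELECT * FROM my_model"]
  resp = requests.post(
      "http://localhost:8000/v2/analysis/sqls",  # assumed path
      json={"manifestStr": manifest_str, "sqls": sqls},
  )
  resp.raise_for_status()
  for sql, result in zip(sqls, resp.json()):  # one result list per input statement, in order
      print(sql, "->", result)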
Connection info schemas

AthenaConnectionInfo
  aws_access_key_id (string or null): AWS access key ID. Optional if using an IAM role, web identity token, or the default credential chain.
  aws_secret_access_key (string or null): AWS secret access key. Optional if using an IAM role, web identity token, or the default credential chain.
  aws_session_token (string or null): AWS session token (used for temporary credentials).
  region_name (string): AWS region for Athena. Optional; the default region is used if not provided.
  role_arn (string or null): the ARN of the role to assume with the web identity token.
  role_session_name (string or null): the session name used when assuming a role (optional).
  s3_staging_dir (string, required): S3 staging directory for Athena queries.
  schema_name (string or null, default "default"): the database name in Athena.
  web_identity_token (string or null): OIDC web identity token (JWT) used for AssumeRoleWithWebIdentity authentication. If provided, PyAthena calls STS to exchange it for temporary credentials.
{- "aws_access_key_id": "AKIA...",
- "aws_secret_access_key": "my-secret-key",
- "aws_session_token": "IQoJb3JpZ2luX2VjEJz//////////wEaCXVzLWVhc3QtMSJHMEUCIQD...",
- "region_name": "us-west-2",
- "role_arn": "arn:aws:iam::123456789012:role/YourAthenaRole",
- "role_session_name": "PyAthena-session",
- "s3_staging_dir": "s3://my-bucket/athena-staging/",
- "schema_name": "default",
- "web_identity_token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9..."
BigQueryDatasetConnectionInfo
  bigquery_type (string, default "dataset"): fixed value "dataset".
  credentials (string, required): base64-encoded credentials.
  dataset_id (string, required): BigQuery dataset id.
  job_timeout_ms (integer or null): job timeout in milliseconds; if the job does not complete within this time, it is cancelled.
  project_id (string, required): GCP project id.
{- "bigquery_type": "dataset",
- "credentials": "eyJ...",
- "dataset_id": "my_dataset",
- "job_timeout_ms": 0,
- "project_id": "my-project"
BigQueryProjectConnectionInfo
  bigquery_type (string, default "project"): fixed value "project".
  billing_project_id (string, required): the billing project id of your BigQuery connection.
  credentials (string, required): base64-encoded credentials.
  job_timeout_ms (integer or null): job timeout in milliseconds; if the job does not complete within this time, it is cancelled.
  region (string, required): the region of your BigQuery connection.
  Example:
    {
      "bigquery_type": "project",
      "billing_project_id": "billing-project-1",
      "credentials": "eyJ...",
      "job_timeout_ms": 0,
      "region": "US"
    }
CannerConnectionInfo
  enableSSL (boolean, default false): enable SSL connection.
  host (string, required): the hostname of your database.
  pat (string, required): the personal access token of your database.
  port (string, required): the port of your database.
  user (string, required): the username of your database.
  workspace (string, required): the workspace of your database.
  Example:
    {
      "enableSSL": false,
      "host": "localhost",
      "pat": "eyJ...",
      "port": "8080",
      "user": "admin",
      "workspace": "default"
    }
ClickHouseConnectionInfo
  database (string, required): the database name of your database.
  host (string, required): the hostname of your database.
  kwargs (object or null): client-specific keyword arguments.
  password (string or null): the password of your database.
  port (string, required): the port of your database.
  secure (boolean, default false): whether or not to use an authenticated endpoint.
  settings (object or null): additional settings for the ClickHouse connection.
  user (string, required): the username of your database.
  Example:
    {
      "database": "default",
      "host": "localhost",
      "kwargs": {
        "property1": "string",
        "property2": "string"
      },
      "password": "password",
      "port": "8123",
      "secure": true,
      "settings": {
        "max_execution_time": "60"
      },
      "user": "default"
    }
DatabricksTokenConnectionInfo
  accessToken (string, required): the access token for your Databricks instance.
  databricks_type (string, default "token"): fixed value "token".
  httpPath (string, required): the HTTP path of your Databricks SQL warehouse.
  serverHostname (string, required): the server hostname of your Databricks instance.
  Example:
    {
      "accessToken": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
      "databricks_type": "token",
      "httpPath": "/sql/1.0/warehouses/xxxxxxxx",
      "serverHostname": "dbc-xxxxxxxx-xxxx.cloud.databricks.com"
    }
GcsFileConnectionInfo
  bucket (string, required): the name of the GCS bucket.
  credentials (string): base64-encoded credentials.
  format (string, default "csv"): file format.
  key_id (string, required): the key id of the GCS bucket.
  secret_key (string, required): the secret key of the GCS bucket.
  url (string, default "/"): the root path within the GCS bucket.
  Example:
    {
      "bucket": "my-bucket",
      "credentials": "eyJ...",
      "format": "csv",
      "key_id": "my-key-id",
      "secret_key": "my-secret-key",
      "url": "/data"
    }
LocalFileConnectionInfo
  format (string, default "csv"): file format.
  url (string, default "/"): the root path of the local files.
  Example:
    {
      "format": "csv",
      "url": "/data"
    }
MSSqlConnectionInfo
  TDS_Version (string, default "8.0"): the TDS version to use.
  database (string, required): the database name of your database.
  driver (string, default "ODBC Driver 18 for SQL Server"): the ODBC driver to use.
  host (string, required): the hostname of your database.
  kwargs (object or null): additional keyword arguments to pass to PyODBC.
  password (string or null): the password of your database.
  port (string, required): the port of your database.
  user (string, required): the username of your database.
  Example:
    {
      "TDS_Version": "8.0",
      "database": "master",
      "driver": "ODBC Driver 18 for SQL Server",
      "host": "localhost",
      "kwargs": {
        "property1": "string",
        "property2": "string"
      },
      "password": "password",
      "port": "1433",
      "user": "sa"
    }
MinioFileConnectionInfo
  access_key (string, required): the access key (account) of the MinIO bucket.
  bucket (string, required): the name of the MinIO bucket.
  endpoint (string, required): the endpoint of the MinIO bucket.
  format (string, default "csv"): file format.
  secret_key (string, required): the secret key (password) of the MinIO bucket.
  ssl_enabled (boolean, default false): whether to use an SSL connection.
  url (string, default "/"): the root path within the MinIO bucket.
  Example:
    {
      "access_key": "my-account",
      "bucket": "my-bucket",
      "endpoint": "localhost:9000",
      "format": "csv",
      "secret_key": "my-password",
      "ssl_enabled": true,
      "url": "/data"
    }
MySqlConnectionInfo
  database (string, required): the database name of your database.
  host (string, required): the hostname of your database.
  kwargs (object or null): additional keyword arguments to pass to PyMySQL.
  password (string or null): the password of your database.
  port (string, required): the port of your database.
  sslCA (string or null): the path to the CA certificate file.
  sslMode (string or null): whether to use an SSL connection.
  user (string, required): the username of your database.
  Example:
    {
      "database": "default",
      "host": "localhost",
      "kwargs": {
        "property1": "string",
        "property2": "string"
      },
      "password": "password",
      "port": "3306",
      "sslMode": "DISABLED",
      "user": "root"
    }
OracleConnectionInfo
  database (string, default "orcl"): the database name of your database.
  dsn (string or null): an Oracle Data Source Name; if provided, it overrides all other connection arguments except the username and password.
  host (string, default "localhost"): the hostname of your database.
  password (string or null): the password of your database.
  port (string, default "1521"): the port of your database.
  user (string, required): the username of your database.
  Example:
    {
      "database": "orcl",
      "dsn": "localhost:1521/orcl",
      "host": "localhost",
      "password": "password",
      "port": "1521",
      "user": "admin"
    }
PostgresConnectionInfo
  database (string, required): the database name of your database.
  host (string, required): the hostname of your database.
  kwargs (object or null): additional keyword arguments passed to the backend client connection.
  password (string or null): the password of your database.
  port (string, required): the port of your database.
  user (string, required): the username of your database.
{- "database": "postgres",
- "host": "localhost",
- "kwargs": {
- "property1": "string",
- "property2": "string"
}, - "password": "password",
- "port": 5432,
- "user": "postgres"
RedshiftConnectionInfo
  database (string, required): the database name of your database.
  host (string, required): the hostname of your database.
  password (string, required): the password of your database.
  port (string, required): the port of your database.
  redshift_type (string, default "redshift"): fixed value "redshift".
  user (string, required): the username of your database.
  Example:
    {
      "database": "dev",
      "host": "localhost",
      "password": "password",
      "port": "5439",
      "redshift_type": "redshift",
      "user": "awsuser"
    }
RedshiftIAMConnectionInfo
  access_key_id (string, required): the access key id of your database.
  access_key_secret (string, required): the secret access key of your database.
  cluster_identifier (string, required): the cluster identifier of your Redshift cluster.
  database (string, required): the database name of your database.
  redshift_type (string, default "redshift_iam"): fixed value "redshift_iam".
  region (string, required): the region of your database.
  user (string, required): the username of your database.
  Example:
    {
      "access_key_id": "AKIA...",
      "access_key_secret": "my-secret-key",
      "cluster_identifier": "my-redshift-cluster",
      "database": "dev",
      "redshift_type": "redshift_iam",
      "region": "us-west-2",
      "user": "awsuser"
    }
S3FileConnectionInfo
  access_key (string, required): the access key of the S3 bucket.
  bucket (string, required): the name of the S3 bucket.
  format (string, default "csv"): file format.
  region (string, required): the region of the S3 bucket.
  secret_key (string, required): the secret key of the S3 bucket.
  url (string, default "/"): the root path within the S3 bucket.
  Example:
    {
      "access_key": "my-access-key",
      "bucket": "my-bucket",
      "format": "csv",
      "region": "us-west-2",
      "secret_key": "my-secret-key",
      "url": "/data"
    }
SnowflakeConnectionInfo
  account (string, required): the account name of your database.
  database (string, required): the database name of your database.
  kwargs (object or null): additional arguments passed to the DBAPI connection call.
  password (string or null): the password of your database.
  private_key (string or null): the private key for key-pair authentication.
  schema (string, required): the schema name of your database.
  user (string, required): the username of your database.
  warehouse (string or null): the warehouse name of your database.
  Example:
    {
      "account": "myaccount",
      "database": "mydb",
      "kwargs": {
        "property1": "string",
        "property2": "string"
      },
      "password": "password",
      "private_key": "private_key_content",
      "schema": "myschema",
      "user": "admin",
      "warehouse": "COMPUTE_WH"
    }
SparkConnectionInfo
  host (string, required): the Spark Connect server hostname.
  port (string, required): the port of your Spark Connect server.
  Example:
    {
      "host": "localhost",
      "port": "15002"
    }
TrinoConnectionInfo
  catalog (string, required): the catalog name of your database.
  host (string, required): the hostname of your database.
  kwargs (object or null): additional keyword arguments passed directly to the trino.dbapi.connect API.
  password (string or null): the password of your database.
  port (string, default "8080"): the port of your database.
  schema (string, required): the schema name of your database.
  user (string or null): the username of your database.
  Example:
    {
      "catalog": "hive",
      "host": "localhost",
      "kwargs": {
        "property1": "string",
        "property2": "string"
      },
      "password": "password",
      "port": "8080",
      "schema": "default",
      "user": "admin"
    }