1. Packages
  2. Airbyte Provider
  3. API Docs
  4. SourceAzureBlobStorage
airbyte 0.9.0 published on Wednesday, Apr 9, 2025 by airbytehq

airbyte.SourceAzureBlobStorage

Explore with Pulumi AI

SourceAzureBlobStorage Resource

Example Usage

Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.SourceAzureBlobStorage;
import com.pulumi.airbyte.SourceAzureBlobStorageArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationCredentialsArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs;
import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    /**
     * Declares an Airbyte Azure Blob Storage source that authenticates with a
     * storage-account key and reads a single CSV-formatted stream.
     *
     * NOTE(review): the original snippet called snake_case builder methods
     * (azure_blob_storage_account_name, start_date, ...). The generated Java
     * SDK exposes camelCase builders only (see the constructor reference
     * example on this page), so those calls would not compile; they are
     * renamed to their camelCase equivalents here.
     */
    public static void stack(Context ctx) {
        var mySourceAzureblobstorage = new SourceAzureBlobStorage("mySourceAzureblobstorage", SourceAzureBlobStorageArgs.builder()
            .configuration(SourceAzureBlobStorageConfigurationArgs.builder()
                .azureBlobStorageAccountName("airbyte5storage")
                .azureBlobStorageContainerName("airbytetescontainername")
                .azureBlobStorageEndpoint("blob.core.windows.net")
                .credentials(SourceAzureBlobStorageConfigurationCredentialsArgs.builder()
                    .authenticateViaStorageAccountKey(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs.builder()
                        .azureBlobStorageAccountKey("Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==")
                        .build())
                    .build())
                .startDate("2021-01-01T00:00:00.000000Z")
                .streams(SourceAzureBlobStorageConfigurationStreamArgs.builder()
                    .daysToSyncIfHistoryIsFull(1)
                    .format(SourceAzureBlobStorageConfigurationStreamFormatArgs.builder()
                        .csvFormat(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs.builder()
                            .delimiter("...my_delimiter...")
                            .doubleQuote(true)
                            .encoding("...my_encoding...")
                            .escapeChar("...my_escape_char...")
                            .falseValues("...")
                            .headerDefinition(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs.builder()
                                .userProvided(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs.builder()
                                    .columnNames("...")
                                    .build())
                                .build())
                            .ignoreErrorsOnFieldsMismatch(false)
                            .nullValues("...")
                            .quoteChar("...my_quote_char...")
                            .skipRowsAfterHeader(5)
                            .skipRowsBeforeHeader(0)
                            .stringsCanBeNull(true)
                            .trueValues("...")
                            .build())
                        .build())
                    .globs("...")
                    .inputSchema("...my_input_schema...")
                    .name("...my_name...")
                    .recentNFilesToReadForSchemaDiscovery(2)
                    .schemaless(true)
                    .validationPolicy("Wait for Discover")
                    .build())
                .build())
            .definitionId("3385920f-d837-42e0-b72d-7927f28bf9f2")
            .secretId("...my_secret_id...")
            .workspaceId("2c3aeaad-c70f-44a8-a981-aca12752c864")
            .build());

    }
}
Copy
# Example usage (YAML): an Azure Blob Storage source authenticating with a
# storage-account key and reading one CSV-formatted stream.
resources:
  mySourceAzureblobstorage:
    type: airbyte:SourceAzureBlobStorage
    properties:
      configuration:
        azure_blob_storage_account_name: airbyte5storage
        azure_blob_storage_container_name: airbytetescontainername
        azure_blob_storage_endpoint: blob.core.windows.net
        credentials:
          authenticateViaStorageAccountKey:
            azureBlobStorageAccountKey: Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==
        start_date: 2021-01-01T00:00:00.000000Z
        streams:
          - daysToSyncIfHistoryIsFull: 1
            format:
              csvFormat:
                delimiter: '...my_delimiter...'
                doubleQuote: true
                encoding: '...my_encoding...'
                escapeChar: '...my_escape_char...'
                falseValues:
                  - '...'
                headerDefinition:
                  userProvided:
                    columnNames:
                      - '...'
                ignoreErrorsOnFieldsMismatch: false
                nullValues:
                  - '...'
                quoteChar: '...my_quote_char...'
                skipRowsAfterHeader: 5
                skipRowsBeforeHeader: 0
                stringsCanBeNull: true
                trueValues:
                  - '...'
            globs:
              - '...'
            inputSchema: '...my_input_schema...'
            name: '...my_name...'
            recentNFilesToReadForSchemaDiscovery: 2
            schemaless: true
            validationPolicy: Wait for Discover
      definitionId: 3385920f-d837-42e0-b72d-7927f28bf9f2
      secretId: '...my_secret_id...'
      workspaceId: 2c3aeaad-c70f-44a8-a981-aca12752c864
Copy

Create SourceAzureBlobStorage Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new SourceAzureBlobStorage(name: string, args: SourceAzureBlobStorageArgs, opts?: CustomResourceOptions);
@overload
def SourceAzureBlobStorage(resource_name: str,
                           args: SourceAzureBlobStorageArgs,
                           opts: Optional[ResourceOptions] = None)

@overload
def SourceAzureBlobStorage(resource_name: str,
                           opts: Optional[ResourceOptions] = None,
                           configuration: Optional[SourceAzureBlobStorageConfigurationArgs] = None,
                           workspace_id: Optional[str] = None,
                           definition_id: Optional[str] = None,
                           name: Optional[str] = None,
                           secret_id: Optional[str] = None)
func NewSourceAzureBlobStorage(ctx *Context, name string, args SourceAzureBlobStorageArgs, opts ...ResourceOption) (*SourceAzureBlobStorage, error)
public SourceAzureBlobStorage(string name, SourceAzureBlobStorageArgs args, CustomResourceOptions? opts = null)
public SourceAzureBlobStorage(String name, SourceAzureBlobStorageArgs args)
public SourceAzureBlobStorage(String name, SourceAzureBlobStorageArgs args, CustomResourceOptions options)
type: airbyte:SourceAzureBlobStorage
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. SourceAzureBlobStorageArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. SourceAzureBlobStorageArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. SourceAzureBlobStorageArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. SourceAzureBlobStorageArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. SourceAzureBlobStorageArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

// Constructor reference example (C#) using placeholder values for all input
// properties. Credential and format members are mutually exclusive one-of
// options in the connector spec; a real config would set only one of each.
var sourceAzureBlobStorageResource = new Airbyte.SourceAzureBlobStorage("sourceAzureBlobStorageResource", new()
{
    Configuration = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationArgs
    {
        AzureBlobStorageAccountName = "string",
        AzureBlobStorageContainerName = "string",
        Credentials = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsArgs
        {
            AuthenticateViaClientCredentials = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs
            {
                AppClientId = "string",
                AppClientSecret = "string",
                AppTenantId = "string",
            },
            AuthenticateViaOauth2 = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args
            {
                ClientId = "string",
                ClientSecret = "string",
                RefreshToken = "string",
                TenantId = "string",
            },
            AuthenticateViaStorageAccountKey = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs
            {
                AzureBlobStorageAccountKey = "string",
            },
        },
        Streams = new[]
        {
            new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamArgs
            {
                Format = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatArgs
                {
                    AvroFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs
                    {
                        DoubleAsString = false,
                    },
                    CsvFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs
                    {
                        Delimiter = "string",
                        DoubleQuote = false,
                        Encoding = "string",
                        EscapeChar = "string",
                        FalseValues = new[]
                        {
                            "string",
                        },
                        HeaderDefinition = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs
                        {
                            Autogenerated = null,
                            FromCsv = null,
                            UserProvided = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs
                            {
                                ColumnNames = new[]
                                {
                                    "string",
                                },
                            },
                        },
                        IgnoreErrorsOnFieldsMismatch = false,
                        NullValues = new[]
                        {
                            "string",
                        },
                        QuoteChar = "string",
                        SkipRowsAfterHeader = 0,
                        SkipRowsBeforeHeader = 0,
                        StringsCanBeNull = false,
                        TrueValues = new[]
                        {
                            "string",
                        },
                    },
                    JsonlFormat = null,
                    ParquetFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs
                    {
                        DecimalAsFloat = false,
                    },
                    UnstructuredDocumentFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs
                    {
                        Processing = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs
                        {
                            Local = null,
                        },
                        SkipUnprocessableFiles = false,
                        Strategy = "string",
                    },
                },
                Name = "string",
                DaysToSyncIfHistoryIsFull = 0,
                Globs = new[]
                {
                    "string",
                },
                InputSchema = "string",
                RecentNFilesToReadForSchemaDiscovery = 0,
                Schemaless = false,
                ValidationPolicy = "string",
            },
        },
        AzureBlobStorageEndpoint = "string",
        StartDate = "string",
    },
    WorkspaceId = "string",
    DefinitionId = "string",
    Name = "string",
    SecretId = "string",
});
Copy
// Constructor reference example (Go) using placeholder values for all input
// properties.
// NOTE(review): the original snippet had empty package qualifiers on every
// composite literal ("&.SourceAzureBlobStorage...Args{"), which is invalid Go;
// the "airbyte" package prefix is restored here so the example compiles.
example, err := airbyte.NewSourceAzureBlobStorage(ctx, "sourceAzureBlobStorageResource", &airbyte.SourceAzureBlobStorageArgs{
	Configuration: &airbyte.SourceAzureBlobStorageConfigurationArgs{
		AzureBlobStorageAccountName:   pulumi.String("string"),
		AzureBlobStorageContainerName: pulumi.String("string"),
		Credentials: &airbyte.SourceAzureBlobStorageConfigurationCredentialsArgs{
			AuthenticateViaClientCredentials: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs{
				AppClientId:     pulumi.String("string"),
				AppClientSecret: pulumi.String("string"),
				AppTenantId:     pulumi.String("string"),
			},
			AuthenticateViaOauth2: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args{
				ClientId:     pulumi.String("string"),
				ClientSecret: pulumi.String("string"),
				RefreshToken: pulumi.String("string"),
				TenantId:     pulumi.String("string"),
			},
			AuthenticateViaStorageAccountKey: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs{
				AzureBlobStorageAccountKey: pulumi.String("string"),
			},
		},
		Streams: airbyte.SourceAzureBlobStorageConfigurationStreamArray{
			&airbyte.SourceAzureBlobStorageConfigurationStreamArgs{
				Format: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatArgs{
					AvroFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs{
						DoubleAsString: pulumi.Bool(false),
					},
					CsvFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs{
						Delimiter:   pulumi.String("string"),
						DoubleQuote: pulumi.Bool(false),
						Encoding:    pulumi.String("string"),
						EscapeChar:  pulumi.String("string"),
						FalseValues: pulumi.StringArray{
							pulumi.String("string"),
						},
						HeaderDefinition: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs{
							Autogenerated: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionAutogeneratedArgs{},
							FromCsv:       &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionFromCsvArgs{},
							UserProvided: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs{
								ColumnNames: pulumi.StringArray{
									pulumi.String("string"),
								},
							},
						},
						IgnoreErrorsOnFieldsMismatch: pulumi.Bool(false),
						NullValues: pulumi.StringArray{
							pulumi.String("string"),
						},
						QuoteChar:            pulumi.String("string"),
						SkipRowsAfterHeader:  pulumi.Float64(0),
						SkipRowsBeforeHeader: pulumi.Float64(0),
						StringsCanBeNull:     pulumi.Bool(false),
						TrueValues: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
					JsonlFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatJsonlFormatArgs{},
					ParquetFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs{
						DecimalAsFloat: pulumi.Bool(false),
					},
					UnstructuredDocumentFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs{
						Processing: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs{
							Local: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocalArgs{},
						},
						SkipUnprocessableFiles: pulumi.Bool(false),
						Strategy:               pulumi.String("string"),
					},
				},
				Name:                      pulumi.String("string"),
				DaysToSyncIfHistoryIsFull: pulumi.Float64(0),
				Globs: pulumi.StringArray{
					pulumi.String("string"),
				},
				InputSchema:                          pulumi.String("string"),
				RecentNFilesToReadForSchemaDiscovery: pulumi.Float64(0),
				Schemaless:                           pulumi.Bool(false),
				ValidationPolicy:                     pulumi.String("string"),
			},
		},
		AzureBlobStorageEndpoint: pulumi.String("string"),
		StartDate:                pulumi.String("string"),
	},
	WorkspaceId:  pulumi.String("string"),
	DefinitionId: pulumi.String("string"),
	Name:         pulumi.String("string"),
	SecretId:     pulumi.String("string"),
})
Copy
// Constructor reference example (Java) using placeholder values for all input
// properties. Credential and format members are mutually exclusive one-of
// options in the connector spec; a real config would set only one of each.
var sourceAzureBlobStorageResource = new SourceAzureBlobStorage("sourceAzureBlobStorageResource", SourceAzureBlobStorageArgs.builder()
    .configuration(SourceAzureBlobStorageConfigurationArgs.builder()
        .azureBlobStorageAccountName("string")
        .azureBlobStorageContainerName("string")
        .credentials(SourceAzureBlobStorageConfigurationCredentialsArgs.builder()
            .authenticateViaClientCredentials(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs.builder()
                .appClientId("string")
                .appClientSecret("string")
                .appTenantId("string")
                .build())
            .authenticateViaOauth2(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args.builder()
                .clientId("string")
                .clientSecret("string")
                .refreshToken("string")
                .tenantId("string")
                .build())
            .authenticateViaStorageAccountKey(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs.builder()
                .azureBlobStorageAccountKey("string")
                .build())
            .build())
        .streams(SourceAzureBlobStorageConfigurationStreamArgs.builder()
            .format(SourceAzureBlobStorageConfigurationStreamFormatArgs.builder()
                .avroFormat(SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs.builder()
                    .doubleAsString(false)
                    .build())
                .csvFormat(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs.builder()
                    .delimiter("string")
                    .doubleQuote(false)
                    .encoding("string")
                    .escapeChar("string")
                    .falseValues("string")
                    .headerDefinition(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs.builder()
                        .autogenerated()
                        .fromCsv()
                        .userProvided(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs.builder()
                            .columnNames("string")
                            .build())
                        .build())
                    .ignoreErrorsOnFieldsMismatch(false)
                    .nullValues("string")
                    .quoteChar("string")
                    .skipRowsAfterHeader(0)
                    .skipRowsBeforeHeader(0)
                    .stringsCanBeNull(false)
                    .trueValues("string")
                    .build())
                .jsonlFormat()
                .parquetFormat(SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs.builder()
                    .decimalAsFloat(false)
                    .build())
                .unstructuredDocumentFormat(SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs.builder()
                    .processing(SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs.builder()
                        .local()
                        .build())
                    .skipUnprocessableFiles(false)
                    .strategy("string")
                    .build())
                .build())
            .name("string")
            .daysToSyncIfHistoryIsFull(0)
            .globs("string")
            .inputSchema("string")
            .recentNFilesToReadForSchemaDiscovery(0)
            .schemaless(false)
            .validationPolicy("string")
            .build())
        .azureBlobStorageEndpoint("string")
        .startDate("string")
        .build())
    .workspaceId("string")
    .definitionId("string")
    .name("string")
    .secretId("string")
    .build());
Copy
# Constructor reference example (Python) using placeholder values for all input
# properties. Inputs are passed as dictionary literals; argument classes work
# equally. Credential and format members are mutually exclusive one-of options
# in the connector spec; a real config would set only one of each.
source_azure_blob_storage_resource = airbyte.SourceAzureBlobStorage("sourceAzureBlobStorageResource",
    configuration={
        "azure_blob_storage_account_name": "string",
        "azure_blob_storage_container_name": "string",
        "credentials": {
            "authenticate_via_client_credentials": {
                "app_client_id": "string",
                "app_client_secret": "string",
                "app_tenant_id": "string",
            },
            "authenticate_via_oauth2": {
                "client_id": "string",
                "client_secret": "string",
                "refresh_token": "string",
                "tenant_id": "string",
            },
            "authenticate_via_storage_account_key": {
                "azure_blob_storage_account_key": "string",
            },
        },
        "streams": [{
            "format": {
                "avro_format": {
                    "double_as_string": False,
                },
                "csv_format": {
                    "delimiter": "string",
                    "double_quote": False,
                    "encoding": "string",
                    "escape_char": "string",
                    "false_values": ["string"],
                    "header_definition": {
                        "autogenerated": {},
                        "from_csv": {},
                        "user_provided": {
                            "column_names": ["string"],
                        },
                    },
                    "ignore_errors_on_fields_mismatch": False,
                    "null_values": ["string"],
                    "quote_char": "string",
                    "skip_rows_after_header": 0,
                    "skip_rows_before_header": 0,
                    "strings_can_be_null": False,
                    "true_values": ["string"],
                },
                "jsonl_format": {},
                "parquet_format": {
                    "decimal_as_float": False,
                },
                "unstructured_document_format": {
                    "processing": {
                        "local": {},
                    },
                    "skip_unprocessable_files": False,
                    "strategy": "string",
                },
            },
            "name": "string",
            "days_to_sync_if_history_is_full": 0,
            "globs": ["string"],
            "input_schema": "string",
            "recent_n_files_to_read_for_schema_discovery": 0,
            "schemaless": False,
            "validation_policy": "string",
        }],
        "azure_blob_storage_endpoint": "string",
        "start_date": "string",
    },
    workspace_id="string",
    definition_id="string",
    name="string",
    secret_id="string")
Copy
// Constructor reference example (TypeScript) using placeholder values for all
// input properties. Credential and format members are mutually exclusive
// one-of options in the connector spec; a real config would set only one of each.
const sourceAzureBlobStorageResource = new airbyte.SourceAzureBlobStorage("sourceAzureBlobStorageResource", {
    configuration: {
        azureBlobStorageAccountName: "string",
        azureBlobStorageContainerName: "string",
        credentials: {
            authenticateViaClientCredentials: {
                appClientId: "string",
                appClientSecret: "string",
                appTenantId: "string",
            },
            authenticateViaOauth2: {
                clientId: "string",
                clientSecret: "string",
                refreshToken: "string",
                tenantId: "string",
            },
            authenticateViaStorageAccountKey: {
                azureBlobStorageAccountKey: "string",
            },
        },
        streams: [{
            format: {
                avroFormat: {
                    doubleAsString: false,
                },
                csvFormat: {
                    delimiter: "string",
                    doubleQuote: false,
                    encoding: "string",
                    escapeChar: "string",
                    falseValues: ["string"],
                    headerDefinition: {
                        autogenerated: {},
                        fromCsv: {},
                        userProvided: {
                            columnNames: ["string"],
                        },
                    },
                    ignoreErrorsOnFieldsMismatch: false,
                    nullValues: ["string"],
                    quoteChar: "string",
                    skipRowsAfterHeader: 0,
                    skipRowsBeforeHeader: 0,
                    stringsCanBeNull: false,
                    trueValues: ["string"],
                },
                jsonlFormat: {},
                parquetFormat: {
                    decimalAsFloat: false,
                },
                unstructuredDocumentFormat: {
                    processing: {
                        local: {},
                    },
                    skipUnprocessableFiles: false,
                    strategy: "string",
                },
            },
            name: "string",
            daysToSyncIfHistoryIsFull: 0,
            globs: ["string"],
            inputSchema: "string",
            recentNFilesToReadForSchemaDiscovery: 0,
            schemaless: false,
            validationPolicy: "string",
        }],
        azureBlobStorageEndpoint: "string",
        startDate: "string",
    },
    workspaceId: "string",
    definitionId: "string",
    name: "string",
    secretId: "string",
});
Copy
# Constructor reference example (YAML) using placeholder values for all input
# properties. Credential and format members are mutually exclusive one-of
# options in the connector spec; a real config would set only one of each.
type: airbyte:SourceAzureBlobStorage
properties:
    configuration:
        azureBlobStorageAccountName: string
        azureBlobStorageContainerName: string
        azureBlobStorageEndpoint: string
        credentials:
            authenticateViaClientCredentials:
                appClientId: string
                appClientSecret: string
                appTenantId: string
            authenticateViaOauth2:
                clientId: string
                clientSecret: string
                refreshToken: string
                tenantId: string
            authenticateViaStorageAccountKey:
                azureBlobStorageAccountKey: string
        startDate: string
        streams:
            - daysToSyncIfHistoryIsFull: 0
              format:
                avroFormat:
                    doubleAsString: false
                csvFormat:
                    delimiter: string
                    doubleQuote: false
                    encoding: string
                    escapeChar: string
                    falseValues:
                        - string
                    headerDefinition:
                        autogenerated: {}
                        fromCsv: {}
                        userProvided:
                            columnNames:
                                - string
                    ignoreErrorsOnFieldsMismatch: false
                    nullValues:
                        - string
                    quoteChar: string
                    skipRowsAfterHeader: 0
                    skipRowsBeforeHeader: 0
                    stringsCanBeNull: false
                    trueValues:
                        - string
                jsonlFormat: {}
                parquetFormat:
                    decimalAsFloat: false
                unstructuredDocumentFormat:
                    processing:
                        local: {}
                    skipUnprocessableFiles: false
                    strategy: string
              globs:
                - string
              inputSchema: string
              name: string
              recentNFilesToReadForSchemaDiscovery: 0
              schemaless: false
              validationPolicy: string
    definitionId: string
    name: string
    secretId: string
    workspaceId: string
Copy

SourceAzureBlobStorage Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The SourceAzureBlobStorage resource accepts the following input properties:

Configuration This property is required. SourceAzureBlobStorageConfiguration
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
WorkspaceId This property is required. string
DefinitionId string
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
Name string
Name of the source e.g. dev-mysql-instance.
SecretId string
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
Configuration This property is required. SourceAzureBlobStorageConfigurationArgs
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
WorkspaceId This property is required. string
DefinitionId string
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
Name string
Name of the source e.g. dev-mysql-instance.
SecretId string
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
configuration This property is required. SourceAzureBlobStorageConfiguration
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
workspaceId This property is required. String
definitionId String
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name String
Name of the source e.g. dev-mysql-instance.
secretId String
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
configuration This property is required. SourceAzureBlobStorageConfiguration
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
workspaceId This property is required. string
definitionId string
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name string
Name of the source e.g. dev-mysql-instance.
secretId string
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
configuration This property is required. SourceAzureBlobStorageConfigurationArgs
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
workspace_id This property is required. str
definition_id str
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name str
Name of the source e.g. dev-mysql-instance.
secret_id str
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
configuration This property is required. Property Map
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
workspaceId This property is required. String
definitionId String
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name String
Name of the source e.g. dev-mysql-instance.
secretId String
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.

Outputs

All input properties are implicitly available as output properties. Additionally, the SourceAzureBlobStorage resource produces the following output properties:

CreatedAt double
Id string
The provider-assigned unique ID for this managed resource.
ResourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
SourceId string
SourceType string
CreatedAt float64
Id string
The provider-assigned unique ID for this managed resource.
ResourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
SourceId string
SourceType string
createdAt Double
id String
The provider-assigned unique ID for this managed resource.
resourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
sourceId String
sourceType String
createdAt number
id string
The provider-assigned unique ID for this managed resource.
resourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
sourceId string
sourceType string
created_at float
id str
The provider-assigned unique ID for this managed resource.
resource_allocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
source_id str
source_type str
createdAt Number
id String
The provider-assigned unique ID for this managed resource.
resourceAllocation Property Map
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
sourceId String
sourceType String

Look up Existing SourceAzureBlobStorage Resource

Get an existing SourceAzureBlobStorage resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: SourceAzureBlobStorageState, opts?: CustomResourceOptions): SourceAzureBlobStorage
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[SourceAzureBlobStorageConfigurationArgs] = None,
        created_at: Optional[float] = None,
        definition_id: Optional[str] = None,
        name: Optional[str] = None,
        resource_allocation: Optional[SourceAzureBlobStorageResourceAllocationArgs] = None,
        secret_id: Optional[str] = None,
        source_id: Optional[str] = None,
        source_type: Optional[str] = None,
        workspace_id: Optional[str] = None) -> SourceAzureBlobStorage
func GetSourceAzureBlobStorage(ctx *Context, name string, id IDInput, state *SourceAzureBlobStorageState, opts ...ResourceOption) (*SourceAzureBlobStorage, error)
public static SourceAzureBlobStorage Get(string name, Input<string> id, SourceAzureBlobStorageState? state, CustomResourceOptions? opts = null)
public static SourceAzureBlobStorage get(String name, Output<String> id, SourceAzureBlobStorageState state, CustomResourceOptions options)
resources:  _:    type: airbyte:SourceAzureBlobStorage    get:      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Configuration SourceAzureBlobStorageConfiguration
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
CreatedAt double
DefinitionId string
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
Name string
Name of the source e.g. dev-mysql-instance.
ResourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
SecretId string
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
SourceId string
SourceType string
WorkspaceId string
Configuration SourceAzureBlobStorageConfigurationArgs
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
CreatedAt float64
DefinitionId string
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
Name string
Name of the source e.g. dev-mysql-instance.
ResourceAllocation SourceAzureBlobStorageResourceAllocationArgs
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
SecretId string
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
SourceId string
SourceType string
WorkspaceId string
configuration SourceAzureBlobStorageConfiguration
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
createdAt Double
definitionId String
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name String
Name of the source e.g. dev-mysql-instance.
resourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
secretId String
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
sourceId String
sourceType String
workspaceId String
configuration SourceAzureBlobStorageConfiguration
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
createdAt number
definitionId string
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name string
Name of the source e.g. dev-mysql-instance.
resourceAllocation SourceAzureBlobStorageResourceAllocation
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
secretId string
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
sourceId string
sourceType string
workspaceId string
configuration SourceAzureBlobStorageConfigurationArgs
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
created_at float
definition_id str
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name str
Name of the source e.g. dev-mysql-instance.
resource_allocation SourceAzureBlobStorageResourceAllocationArgs
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
secret_id str
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
source_id str
source_type str
workspace_id str
configuration Property Map
NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
createdAt Number
definitionId String
The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
name String
Name of the source e.g. dev-mysql-instance.
resourceAllocation Property Map
Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
secretId String
Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
sourceId String
sourceType String
workspaceId String

Supporting Types

SourceAzureBlobStorageConfiguration
, SourceAzureBlobStorageConfigurationArgs

AzureBlobStorageAccountName This property is required. string
The account's name of the Azure Blob Storage.
AzureBlobStorageContainerName This property is required. string
The name of the Azure blob storage container.
Credentials This property is required. SourceAzureBlobStorageConfigurationCredentials
Credentials for connecting to the Azure Blob Storage
Streams This property is required. List<SourceAzureBlobStorageConfigurationStream>
Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
AzureBlobStorageEndpoint string
This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint, as shown in the example.
StartDate string
UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
AzureBlobStorageAccountName This property is required. string
The account's name of the Azure Blob Storage.
AzureBlobStorageContainerName This property is required. string
The name of the Azure blob storage container.
Credentials This property is required. SourceAzureBlobStorageConfigurationCredentials
Credentials for connecting to the Azure Blob Storage
Streams This property is required. []SourceAzureBlobStorageConfigurationStream
Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
AzureBlobStorageEndpoint string
This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint, as shown in the example.
StartDate string
UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
azureBlobStorageAccountName This property is required. String
The account's name of the Azure Blob Storage.
azureBlobStorageContainerName This property is required. String
The name of the Azure blob storage container.
credentials This property is required. SourceAzureBlobStorageConfigurationCredentials
Credentials for connecting to the Azure Blob Storage
streams This property is required. List<SourceAzureBlobStorageConfigurationStream>
Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
azureBlobStorageEndpoint String
This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint, as shown in the example.
startDate String
UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
azureBlobStorageAccountName This property is required. string
The account's name of the Azure Blob Storage.
azureBlobStorageContainerName This property is required. string
The name of the Azure blob storage container.
credentials This property is required. SourceAzureBlobStorageConfigurationCredentials
Credentials for connecting to the Azure Blob Storage
streams This property is required. SourceAzureBlobStorageConfigurationStream[]
Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
azureBlobStorageEndpoint string
This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint, as shown in the example.
startDate string
UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
azure_blob_storage_account_name This property is required. str
The account's name of the Azure Blob Storage.
azure_blob_storage_container_name This property is required. str
The name of the Azure blob storage container.
credentials This property is required. SourceAzureBlobStorageConfigurationCredentials
Credentials for connecting to the Azure Blob Storage
streams This property is required. Sequence[SourceAzureBlobStorageConfigurationStream]
Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
azure_blob_storage_endpoint str
This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint, as shown in the example.
start_date str
UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
azureBlobStorageAccountName This property is required. String
The account's name of the Azure Blob Storage.
azureBlobStorageContainerName This property is required. String
The name of the Azure blob storage container.
credentials This property is required. Property Map
Credentials for connecting to the Azure Blob Storage
streams This property is required. List<Property Map>
Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
azureBlobStorageEndpoint String
This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint, as shown in the example.
startDate String
UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.

SourceAzureBlobStorageConfigurationCredentials
, SourceAzureBlobStorageConfigurationCredentialsArgs

SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentials
, SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs

AppClientId This property is required. string
Client ID of your Microsoft developer application
AppClientSecret This property is required. string
Client Secret of your Microsoft developer application
AppTenantId This property is required. string
Tenant ID of the Microsoft Azure Application
AppClientId This property is required. string
Client ID of your Microsoft developer application
AppClientSecret This property is required. string
Client Secret of your Microsoft developer application
AppTenantId This property is required. string
Tenant ID of the Microsoft Azure Application
appClientId This property is required. String
Client ID of your Microsoft developer application
appClientSecret This property is required. String
Client Secret of your Microsoft developer application
appTenantId This property is required. String
Tenant ID of the Microsoft Azure Application
appClientId This property is required. string
Client ID of your Microsoft developer application
appClientSecret This property is required. string
Client Secret of your Microsoft developer application
appTenantId This property is required. string
Tenant ID of the Microsoft Azure Application
app_client_id This property is required. str
Client ID of your Microsoft developer application
app_client_secret This property is required. str
Client Secret of your Microsoft developer application
app_tenant_id This property is required. str
Tenant ID of the Microsoft Azure Application
appClientId This property is required. String
Client ID of your Microsoft developer application
appClientSecret This property is required. String
Client Secret of your Microsoft developer application
appTenantId This property is required. String
Tenant ID of the Microsoft Azure Application

SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2
, SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args

ClientId This property is required. string
Client ID of your Microsoft developer application
ClientSecret This property is required. string
Client Secret of your Microsoft developer application
RefreshToken This property is required. string
Refresh Token of your Microsoft developer application
TenantId This property is required. string
Tenant ID of the Microsoft Azure Application user
ClientId This property is required. string
Client ID of your Microsoft developer application
ClientSecret This property is required. string
Client Secret of your Microsoft developer application
RefreshToken This property is required. string
Refresh Token of your Microsoft developer application
TenantId This property is required. string
Tenant ID of the Microsoft Azure Application user
clientId This property is required. String
Client ID of your Microsoft developer application
clientSecret This property is required. String
Client Secret of your Microsoft developer application
refreshToken This property is required. String
Refresh Token of your Microsoft developer application
tenantId This property is required. String
Tenant ID of the Microsoft Azure Application user
clientId This property is required. string
Client ID of your Microsoft developer application
clientSecret This property is required. string
Client Secret of your Microsoft developer application
refreshToken This property is required. string
Refresh Token of your Microsoft developer application
tenantId This property is required. string
Tenant ID of the Microsoft Azure Application user
client_id This property is required. str
Client ID of your Microsoft developer application
client_secret This property is required. str
Client Secret of your Microsoft developer application
refresh_token This property is required. str
Refresh Token of your Microsoft developer application
tenant_id This property is required. str
Tenant ID of the Microsoft Azure Application user
clientId This property is required. String
Client ID of your Microsoft developer application
clientSecret This property is required. String
Client Secret of your Microsoft developer application
refreshToken This property is required. String
Refresh Token of your Microsoft developer application
tenantId This property is required. String
Tenant ID of the Microsoft Azure Application user

SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKey
, SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs

AzureBlobStorageAccountKey This property is required. string
The Azure blob storage account key.
AzureBlobStorageAccountKey This property is required. string
The Azure blob storage account key.
azureBlobStorageAccountKey This property is required. String
The Azure blob storage account key.
azureBlobStorageAccountKey This property is required. string
The Azure blob storage account key.
azure_blob_storage_account_key This property is required. str
The Azure blob storage account key.
azureBlobStorageAccountKey This property is required. String
The Azure blob storage account key.

SourceAzureBlobStorageConfigurationStream
, SourceAzureBlobStorageConfigurationStreamArgs

Format This property is required. SourceAzureBlobStorageConfigurationStreamFormat
The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
Name This property is required. string
The name of the stream.
DaysToSyncIfHistoryIsFull double
When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
Globs List<string>
The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, see the glob documentation.
InputSchema string
The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
RecentNFilesToReadForSchemaDiscovery double
The number of most recent files that will be used to discover the schema for this stream.
Schemaless bool
When enabled, syncs will not validate or structure records against the stream's schema. Default: false
ValidationPolicy string
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
Format This property is required. SourceAzureBlobStorageConfigurationStreamFormat
The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
Name This property is required. string
The name of the stream.
DaysToSyncIfHistoryIsFull float64
When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
Globs []string
The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, see the glob documentation.
InputSchema string
The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
RecentNFilesToReadForSchemaDiscovery float64
The number of most recent files that will be used to discover the schema for this stream.
Schemaless bool
When enabled, syncs will not validate or structure records against the stream's schema. Default: false
ValidationPolicy string
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
format This property is required. SourceAzureBlobStorageConfigurationStreamFormat
The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
name This property is required. String
The name of the stream.
daysToSyncIfHistoryIsFull Double
When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
globs List<String>
The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, see the glob documentation.
inputSchema String
The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
recentNFilesToReadForSchemaDiscovery Double
The number of most recent files that will be used to discover the schema for this stream.
schemaless Boolean
When enabled, syncs will not validate or structure records against the stream's schema. Default: false
validationPolicy String
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
format This property is required. SourceAzureBlobStorageConfigurationStreamFormat
The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
name This property is required. string
The name of the stream.
daysToSyncIfHistoryIsFull number
When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
globs string[]
The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, see the glob documentation.
inputSchema string
The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
recentNFilesToReadForSchemaDiscovery number
The number of most recent files that will be used to discover the schema for this stream.
schemaless boolean
When enabled, syncs will not validate or structure records against the stream's schema. Default: false
validationPolicy string
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
format This property is required. SourceAzureBlobStorageConfigurationStreamFormat
The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
name This property is required. str
The name of the stream.
days_to_sync_if_history_is_full float
When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
globs Sequence[str]
The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, see the glob documentation.
input_schema str
The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
recent_n_files_to_read_for_schema_discovery float
The number of most recent files that will be used to discover the schema for this stream.
schemaless bool
When enabled, syncs will not validate or structure records against the stream's schema. Default: false
validation_policy str
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
format This property is required. Property Map
The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
name This property is required. String
The name of the stream.
daysToSyncIfHistoryIsFull Number
When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
globs List<String>
The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, see the glob documentation.
inputSchema String
The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
recentNFilesToReadForSchemaDiscovery Number
The number of most recent files that will be used to discover the schema for this stream.
schemaless Boolean
When enabled, syncs will not validate or structure records against the stream's schema. Default: false
validationPolicy String
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]

SourceAzureBlobStorageConfigurationStreamFormat
, SourceAzureBlobStorageConfigurationStreamFormatArgs

avroFormat Property Map
csvFormat Property Map
jsonlFormat Property Map
parquetFormat Property Map
unstructuredDocumentFormat Property Map
Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.

SourceAzureBlobStorageConfigurationStreamFormatAvroFormat
, SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs

DoubleAsString bool
Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
DoubleAsString bool
Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
doubleAsString Boolean
Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
doubleAsString boolean
Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
double_as_string bool
Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
doubleAsString Boolean
Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false

SourceAzureBlobStorageConfigurationStreamFormatCsvFormat
, SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs

Delimiter string
The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
DoubleQuote bool
Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
Encoding string
The character encoding of the CSV data. Leave blank to default to UTF-8. See the list of Python encodings for allowable options. Default: "utf8"
EscapeChar string
The character used for escaping special characters. To disallow escaping, leave this field blank.
FalseValues List<string>
A set of case-sensitive strings that should be interpreted as false values.
HeaderDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
IgnoreErrorsOnFieldsMismatch bool
Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
NullValues List<string>
A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
QuoteChar string
The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
SkipRowsAfterHeader double
The number of rows to skip after the header row. Default: 0
SkipRowsBeforeHeader double
The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
StringsCanBeNull bool
Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
TrueValues List<string>
A set of case-sensitive strings that should be interpreted as true values.
Delimiter string
The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
DoubleQuote bool
Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
Encoding string
The character encoding of the CSV data. Leave blank to default to UTF-8. See the list of Python encodings for allowable options. Default: "utf8"
EscapeChar string
The character used for escaping special characters. To disallow escaping, leave this field blank.
FalseValues []string
A set of case-sensitive strings that should be interpreted as false values.
HeaderDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
IgnoreErrorsOnFieldsMismatch bool
Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
NullValues []string
A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
QuoteChar string
The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
SkipRowsAfterHeader float64
The number of rows to skip after the header row. Default: 0
SkipRowsBeforeHeader float64
The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
StringsCanBeNull bool
Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
TrueValues []string
A set of case-sensitive strings that should be interpreted as true values.
delimiter String
The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
doubleQuote Boolean
Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
encoding String
The character encoding of the CSV data. Leave blank to default to UTF-8. See the list of Python encodings for allowable options. Default: "utf8"
escapeChar String
The character used for escaping special characters. To disallow escaping, leave this field blank.
falseValues List<String>
A set of case-sensitive strings that should be interpreted as false values.
headerDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
ignoreErrorsOnFieldsMismatch Boolean
Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
nullValues List<String>
A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
quoteChar String
The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
skipRowsAfterHeader Double
The number of rows to skip after the header row. Default: 0
skipRowsBeforeHeader Double
The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
stringsCanBeNull Boolean
Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
trueValues List<String>
A set of case-sensitive strings that should be interpreted as true values.
delimiter string
The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
doubleQuote boolean
Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
encoding string
The character encoding of the CSV data. Leave blank to default to UTF-8. See the list of Python encodings for allowable options. Default: "utf8"
escapeChar string
The character used for escaping special characters. To disallow escaping, leave this field blank.
falseValues string[]
A set of case-sensitive strings that should be interpreted as false values.
headerDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
ignoreErrorsOnFieldsMismatch boolean
Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
nullValues string[]
A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
quoteChar string
The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
skipRowsAfterHeader number
The number of rows to skip after the header row. Default: 0
skipRowsBeforeHeader number
The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
stringsCanBeNull boolean
Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
trueValues string[]
A set of case-sensitive strings that should be interpreted as true values.
delimiter str
The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
double_quote bool
Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
encoding str
The character encoding of the CSV data. Leave blank to default to UTF-8. See the list of Python encodings for allowable options. Default: "utf8"
escape_char str
The character used for escaping special characters. To disallow escaping, leave this field blank.
false_values Sequence[str]
A set of case-sensitive strings that should be interpreted as false values.
header_definition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
ignore_errors_on_fields_mismatch bool
Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
null_values Sequence[str]
A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
quote_char str
The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
skip_rows_after_header float
The number of rows to skip after the header row. Default: 0
skip_rows_before_header float
The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
strings_can_be_null bool
Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
true_values Sequence[str]
A set of case-sensitive strings that should be interpreted as true values.
delimiter String
The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
doubleQuote Boolean
Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
encoding String
The character encoding of the CSV data. Leave blank to default to UTF-8. See the list of Python encodings for allowable options. Default: "utf8"
escapeChar String
The character used for escaping special characters. To disallow escaping, leave this field blank.
falseValues List<String>
A set of case-sensitive strings that should be interpreted as false values.
headerDefinition Property Map
How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
ignoreErrorsOnFieldsMismatch Boolean
Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
nullValues List<String>
A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
quoteChar String
The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
skipRowsAfterHeader Number
The number of rows to skip after the header row. Default: 0
skipRowsBeforeHeader Number
The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
stringsCanBeNull Boolean
Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
trueValues List<String>
A set of case-sensitive strings that should be interpreted as true values.

SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
, SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs

SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvided
, SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs

ColumnNames This property is required. List<string>
The column names that will be used while emitting the CSV records
ColumnNames This property is required. []string
The column names that will be used while emitting the CSV records
columnNames This property is required. List<String>
The column names that will be used while emitting the CSV records
columnNames This property is required. string[]
The column names that will be used while emitting the CSV records
column_names This property is required. Sequence[str]
The column names that will be used while emitting the CSV records
columnNames This property is required. List<String>
The column names that will be used while emitting the CSV records

SourceAzureBlobStorageConfigurationStreamFormatParquetFormat
, SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs

DecimalAsFloat bool
Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
DecimalAsFloat bool
Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
decimalAsFloat Boolean
Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
decimalAsFloat boolean
Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
decimal_as_float bool
Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
decimalAsFloat Boolean
Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false

SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormat
, SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs

Processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
Processing configuration
SkipUnprocessableFiles bool
If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
Strategy string
The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
Processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
Processing configuration
SkipUnprocessableFiles bool
If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
Strategy string
The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
Processing configuration
skipUnprocessableFiles Boolean
If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
strategy String
The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
Processing configuration
skipUnprocessableFiles boolean
If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
strategy string
The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
Processing configuration
skip_unprocessable_files bool
If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
strategy str
The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
processing Property Map
Processing configuration
skipUnprocessableFiles Boolean
If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
strategy String
The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]

SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
, SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs

Local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
Process files locally, supporting fast and ocr modes. This is the default option.
Local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
Process files locally, supporting fast and ocr modes. This is the default option.
local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
Process files locally, supporting fast and ocr modes. This is the default option.
local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
Process files locally, supporting fast and ocr modes. This is the default option.
local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
Process files locally, supporting fast and ocr modes. This is the default option.
local Property Map
Process files locally, supporting fast and ocr modes. This is the default option.

SourceAzureBlobStorageResourceAllocation
, SourceAzureBlobStorageResourceAllocationArgs

Default SourceAzureBlobStorageResourceAllocationDefault
optional resource requirements to run workers (blank for unbounded allocations)
JobSpecifics []SourceAzureBlobStorageResourceAllocationJobSpecific
default SourceAzureBlobStorageResourceAllocationDefault
optional resource requirements to run workers (blank for unbounded allocations)
jobSpecifics SourceAzureBlobStorageResourceAllocationJobSpecific[]
default Property Map
optional resource requirements to run workers (blank for unbounded allocations)
jobSpecifics List<Property Map>

SourceAzureBlobStorageResourceAllocationDefault
, SourceAzureBlobStorageResourceAllocationDefaultArgs

SourceAzureBlobStorageResourceAllocationJobSpecific
, SourceAzureBlobStorageResourceAllocationJobSpecificArgs

JobType string
enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
ResourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
optional resource requirements to run workers (blank for unbounded allocations)
JobType string
enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
ResourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
optional resource requirements to run workers (blank for unbounded allocations)
jobType String
enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
resourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
optional resource requirements to run workers (blank for unbounded allocations)
jobType string
enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
resourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
optional resource requirements to run workers (blank for unbounded allocations)
job_type str
enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
resource_requirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
optional resource requirements to run workers (blank for unbounded allocations)
jobType String
enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
resourceRequirements Property Map
optional resource requirements to run workers (blank for unbounded allocations)

SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
, SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirementsArgs

Import

$ pulumi import airbyte:index/sourceAzureBlobStorage:SourceAzureBlobStorage my_airbyte_source_azure_blob_storage ""
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
airbyte airbytehq/terraform-provider-airbyte
License
Notes
This Pulumi package is based on the airbyte Terraform Provider.