
We recommend using Azure Native.

Azure v6.22.0 published on Tuesday, Apr 1, 2025 by Pulumi

azure.datafactory.DatasetParquet


Manages an Azure Parquet Dataset inside an Azure Data Factory.

Example Usage

import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

const example = new azure.core.ResourceGroup("example", {
    name: "example-resources",
    location: "West Europe",
});
const exampleFactory = new azure.datafactory.Factory("example", {
    name: "example",
    location: example.location,
    resourceGroupName: example.name,
});
const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    authenticationType: "Anonymous",
    url: "https://www.bing.com",
});
const exampleDatasetParquet = new azure.datafactory.DatasetParquet("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
});
import pulumi
import pulumi_azure as azure

example = azure.core.ResourceGroup("example",
    name="example-resources",
    location="West Europe")
example_factory = azure.datafactory.Factory("example",
    name="example",
    location=example.location,
    resource_group_name=example.name)
example_linked_service_web = azure.datafactory.LinkedServiceWeb("example",
    name="example",
    data_factory_id=example_factory.id,
    authentication_type="Anonymous",
    url="https://www.bing.com")
example_dataset_parquet = azure.datafactory.DatasetParquet("example",
    name="example",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.txt",
    })
package main

import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("example-resources"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
			Name:              pulumi.String("example"),
			Location:          example.Location,
			ResourceGroupName: example.Name,
		})
		if err != nil {
			return err
		}
		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "example", &datafactory.LinkedServiceWebArgs{
			Name:               pulumi.String("example"),
			DataFactoryId:      exampleFactory.ID(),
			AuthenticationType: pulumi.String("Anonymous"),
			Url:                pulumi.String("https://www.bing.com"),
		})
		if err != nil {
			return err
		}
		_, err = datafactory.NewDatasetParquet(ctx, "example", &datafactory.DatasetParquetArgs{
			Name:              pulumi.String("example"),
			DataFactoryId:     exampleFactory.ID(),
			LinkedServiceName: exampleLinkedServiceWeb.Name,
			HttpServerLocation: &datafactory.DatasetParquetHttpServerLocationArgs{
				RelativeUrl: pulumi.String("http://www.bing.com"),
				Path:        pulumi.String("foo/bar/"),
				Filename:    pulumi.String("fizz.txt"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() => 
{
    var example = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "example-resources",
        Location = "West Europe",
    });

    var exampleFactory = new Azure.DataFactory.Factory("example", new()
    {
        Name = "example",
        Location = example.Location,
        ResourceGroupName = example.Name,
    });

    var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        AuthenticationType = "Anonymous",
        Url = "https://www.bing.com",
    });

    var exampleDatasetParquet = new Azure.DataFactory.DatasetParquet("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        LinkedServiceName = exampleLinkedServiceWeb.Name,
        HttpServerLocation = new Azure.DataFactory.Inputs.DatasetParquetHttpServerLocationArgs
        {
            RelativeUrl = "http://www.bing.com",
            Path = "foo/bar/",
            Filename = "fizz.txt",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceWeb;
import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
import com.pulumi.azure.datafactory.DatasetParquet;
import com.pulumi.azure.datafactory.DatasetParquetArgs;
import com.pulumi.azure.datafactory.inputs.DatasetParquetHttpServerLocationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());

        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .name("example")
            .location(example.location())
            .resourceGroupName(example.name())
            .build());

        var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .authenticationType("Anonymous")
            .url("https://www.bing.com")
            .build());

        var exampleDatasetParquet = new DatasetParquet("exampleDatasetParquet", DatasetParquetArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedServiceWeb.name())
            .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
                .relativeUrl("http://www.bing.com")
                .path("foo/bar/")
                .filename("fizz.txt")
                .build())
            .build());

    }
}
resources:
  example:
    type: azure:core:ResourceGroup
    properties:
      name: example-resources
      location: West Europe
  exampleFactory:
    type: azure:datafactory:Factory
    name: example
    properties:
      name: example
      location: ${example.location}
      resourceGroupName: ${example.name}
  exampleLinkedServiceWeb:
    type: azure:datafactory:LinkedServiceWeb
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      authenticationType: Anonymous
      url: https://www.bing.com
  exampleDatasetParquet:
    type: azure:datafactory:DatasetParquet
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      linkedServiceName: ${exampleLinkedServiceWeb.name}
      httpServerLocation:
        relativeUrl: http://www.bing.com
        path: foo/bar/
        filename: fizz.txt

Create DatasetParquet Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new DatasetParquet(name: string, args: DatasetParquetArgs, opts?: CustomResourceOptions);
@overload
def DatasetParquet(resource_name: str,
                   args: DatasetParquetArgs,
                   opts: Optional[ResourceOptions] = None)

@overload
def DatasetParquet(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   data_factory_id: Optional[str] = None,
                   linked_service_name: Optional[str] = None,
                   azure_blob_storage_location: Optional[DatasetParquetAzureBlobStorageLocationArgs] = None,
                   additional_properties: Optional[Mapping[str, str]] = None,
                   compression_codec: Optional[str] = None,
                   compression_level: Optional[str] = None,
                   azure_blob_fs_location: Optional[DatasetParquetAzureBlobFsLocationArgs] = None,
                   description: Optional[str] = None,
                   folder: Optional[str] = None,
                   http_server_location: Optional[DatasetParquetHttpServerLocationArgs] = None,
                   annotations: Optional[Sequence[str]] = None,
                   name: Optional[str] = None,
                   parameters: Optional[Mapping[str, str]] = None,
                   schema_columns: Optional[Sequence[DatasetParquetSchemaColumnArgs]] = None)
func NewDatasetParquet(ctx *Context, name string, args DatasetParquetArgs, opts ...ResourceOption) (*DatasetParquet, error)
public DatasetParquet(string name, DatasetParquetArgs args, CustomResourceOptions? opts = null)
public DatasetParquet(String name, DatasetParquetArgs args)
public DatasetParquet(String name, DatasetParquetArgs args, CustomResourceOptions options)
type: azure:datafactory:DatasetParquet
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. DatasetParquetArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. DatasetParquetArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. DatasetParquetArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. DatasetParquetArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. DatasetParquetArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var datasetParquetResource = new Azure.DataFactory.DatasetParquet("datasetParquetResource", new()
{
    DataFactoryId = "string",
    LinkedServiceName = "string",
    AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetParquetAzureBlobStorageLocationArgs
    {
        Container = "string",
        DynamicContainerEnabled = false,
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        Filename = "string",
        Path = "string",
    },
    AdditionalProperties = 
    {
        { "string", "string" },
    },
    CompressionCodec = "string",
    CompressionLevel = "string",
    AzureBlobFsLocation = new Azure.DataFactory.Inputs.DatasetParquetAzureBlobFsLocationArgs
    {
        DynamicFileSystemEnabled = false,
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        FileSystem = "string",
        Filename = "string",
        Path = "string",
    },
    Description = "string",
    Folder = "string",
    HttpServerLocation = new Azure.DataFactory.Inputs.DatasetParquetHttpServerLocationArgs
    {
        Filename = "string",
        RelativeUrl = "string",
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        Path = "string",
    },
    Annotations = new[]
    {
        "string",
    },
    Name = "string",
    Parameters = 
    {
        { "string", "string" },
    },
    SchemaColumns = new[]
    {
        new Azure.DataFactory.Inputs.DatasetParquetSchemaColumnArgs
        {
            Name = "string",
            Description = "string",
            Type = "string",
        },
    },
});
example, err := datafactory.NewDatasetParquet(ctx, "datasetParquetResource", &datafactory.DatasetParquetArgs{
	DataFactoryId:     pulumi.String("string"),
	LinkedServiceName: pulumi.String("string"),
	AzureBlobStorageLocation: &datafactory.DatasetParquetAzureBlobStorageLocationArgs{
		Container:               pulumi.String("string"),
		DynamicContainerEnabled: pulumi.Bool(false),
		DynamicFilenameEnabled:  pulumi.Bool(false),
		DynamicPathEnabled:      pulumi.Bool(false),
		Filename:                pulumi.String("string"),
		Path:                    pulumi.String("string"),
	},
	AdditionalProperties: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	CompressionCodec: pulumi.String("string"),
	CompressionLevel: pulumi.String("string"),
	AzureBlobFsLocation: &datafactory.DatasetParquetAzureBlobFsLocationArgs{
		DynamicFileSystemEnabled: pulumi.Bool(false),
		DynamicFilenameEnabled:   pulumi.Bool(false),
		DynamicPathEnabled:       pulumi.Bool(false),
		FileSystem:               pulumi.String("string"),
		Filename:                 pulumi.String("string"),
		Path:                     pulumi.String("string"),
	},
	Description: pulumi.String("string"),
	Folder:      pulumi.String("string"),
	HttpServerLocation: &datafactory.DatasetParquetHttpServerLocationArgs{
		Filename:               pulumi.String("string"),
		RelativeUrl:            pulumi.String("string"),
		DynamicFilenameEnabled: pulumi.Bool(false),
		DynamicPathEnabled:     pulumi.Bool(false),
		Path:                   pulumi.String("string"),
	},
	Annotations: pulumi.StringArray{
		pulumi.String("string"),
	},
	Name: pulumi.String("string"),
	Parameters: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	SchemaColumns: datafactory.DatasetParquetSchemaColumnArray{
		&datafactory.DatasetParquetSchemaColumnArgs{
			Name:        pulumi.String("string"),
			Description: pulumi.String("string"),
			Type:        pulumi.String("string"),
		},
	},
})
var datasetParquetResource = new DatasetParquet("datasetParquetResource", DatasetParquetArgs.builder()
    .dataFactoryId("string")
    .linkedServiceName("string")
    .azureBlobStorageLocation(DatasetParquetAzureBlobStorageLocationArgs.builder()
        .container("string")
        .dynamicContainerEnabled(false)
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .filename("string")
        .path("string")
        .build())
    .additionalProperties(Map.of("string", "string"))
    .compressionCodec("string")
    .compressionLevel("string")
    .azureBlobFsLocation(DatasetParquetAzureBlobFsLocationArgs.builder()
        .dynamicFileSystemEnabled(false)
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .fileSystem("string")
        .filename("string")
        .path("string")
        .build())
    .description("string")
    .folder("string")
    .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
        .filename("string")
        .relativeUrl("string")
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .path("string")
        .build())
    .annotations("string")
    .name("string")
    .parameters(Map.of("string", "string"))
    .schemaColumns(DatasetParquetSchemaColumnArgs.builder()
        .name("string")
        .description("string")
        .type("string")
        .build())
    .build());
dataset_parquet_resource = azure.datafactory.DatasetParquet("datasetParquetResource",
    data_factory_id="string",
    linked_service_name="string",
    azure_blob_storage_location={
        "container": "string",
        "dynamic_container_enabled": False,
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "filename": "string",
        "path": "string",
    },
    additional_properties={
        "string": "string",
    },
    compression_codec="string",
    compression_level="string",
    azure_blob_fs_location={
        "dynamic_file_system_enabled": False,
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "file_system": "string",
        "filename": "string",
        "path": "string",
    },
    description="string",
    folder="string",
    http_server_location={
        "filename": "string",
        "relative_url": "string",
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "path": "string",
    },
    annotations=["string"],
    name="string",
    parameters={
        "string": "string",
    },
    schema_columns=[{
        "name": "string",
        "description": "string",
        "type": "string",
    }])
const datasetParquetResource = new azure.datafactory.DatasetParquet("datasetParquetResource", {
    dataFactoryId: "string",
    linkedServiceName: "string",
    azureBlobStorageLocation: {
        container: "string",
        dynamicContainerEnabled: false,
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        filename: "string",
        path: "string",
    },
    additionalProperties: {
        string: "string",
    },
    compressionCodec: "string",
    compressionLevel: "string",
    azureBlobFsLocation: {
        dynamicFileSystemEnabled: false,
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        fileSystem: "string",
        filename: "string",
        path: "string",
    },
    description: "string",
    folder: "string",
    httpServerLocation: {
        filename: "string",
        relativeUrl: "string",
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        path: "string",
    },
    annotations: ["string"],
    name: "string",
    parameters: {
        string: "string",
    },
    schemaColumns: [{
        name: "string",
        description: "string",
        type: "string",
    }],
});
type: azure:datafactory:DatasetParquet
properties:
    additionalProperties:
        string: string
    annotations:
        - string
    azureBlobFsLocation:
        dynamicFileSystemEnabled: false
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        fileSystem: string
        filename: string
        path: string
    azureBlobStorageLocation:
        container: string
        dynamicContainerEnabled: false
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        filename: string
        path: string
    compressionCodec: string
    compressionLevel: string
    dataFactoryId: string
    description: string
    folder: string
    httpServerLocation:
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        filename: string
        path: string
        relativeUrl: string
    linkedServiceName: string
    name: string
    parameters:
        string: string
    schemaColumns:
        - description: string
          name: string
          type: string

DatasetParquet Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
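For instance, the http_server_location input from the Example Usage above can be written either way; the two forms below are equivalent (a minimal sketch):

import pulumi_azure as azure

# As a typed argument class:
http_location = azure.datafactory.DatasetParquetHttpServerLocationArgs(
    relative_url="http://www.bing.com",
    path="foo/bar/",
    filename="fizz.txt",
)

# As a dictionary literal with snake_case keys:
http_location = {
    "relative_url": "http://www.bing.com",
    "path": "foo/bar/",
    "filename": "fizz.txt",
}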

The DatasetParquet resource accepts the following input properties:

DataFactoryId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
LinkedServiceName This property is required. string
The name of the Data Factory Linked Service with which to associate the Dataset.
AdditionalProperties Dictionary<string, string>

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

Annotations List<string>
List of tags that can be used for describing the Data Factory Dataset.
AzureBlobFsLocation DatasetParquetAzureBlobFsLocation
An azure_blob_fs_location block as defined below.
AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

CompressionCodec string
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
CompressionLevel string
Specifies the compression level. Possible values are Optimal and Fastest.
Description string
The description for the Data Factory Dataset.
Folder string
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
HttpServerLocation DatasetParquetHttpServerLocation
An http_server_location block as defined below.
Name Changes to this property will trigger replacement. string
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Dataset.
SchemaColumns List<DatasetParquetSchemaColumn>
A schema_column block as defined below.
DataFactoryId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
LinkedServiceName This property is required. string
The name of the Data Factory Linked Service with which to associate the Dataset.
AdditionalProperties map[string]string

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

Annotations []string
List of tags that can be used for describing the Data Factory Dataset.
AzureBlobFsLocation DatasetParquetAzureBlobFsLocationArgs
An azure_blob_fs_location block as defined below.
AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

CompressionCodec string
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
CompressionLevel string
Specifies the compression level. Possible values are Optimal and Fastest.
Description string
The description for the Data Factory Dataset.
Folder string
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
HttpServerLocation DatasetParquetHttpServerLocationArgs
An http_server_location block as defined below.
Name Changes to this property will trigger replacement. string
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
Parameters map[string]string
A map of parameters to associate with the Data Factory Dataset.
SchemaColumns []DatasetParquetSchemaColumnArgs
A schema_column block as defined below.
dataFactoryId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
linkedServiceName This property is required. String
The name of the Data Factory Linked Service with which to associate the Dataset.
additionalProperties Map<String,String>

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations List<String>
List of tags that can be used for describing the Data Factory Dataset.
azureBlobFsLocation DatasetParquetAzureBlobFsLocation
An azure_blob_fs_location block as defined below.
azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compressionCodec String
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compressionLevel String
Specifies the compression level. Possible values are Optimal and Fastest.
description String
The description for the Data Factory Dataset.
folder String
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
httpServerLocation DatasetParquetHttpServerLocation
An http_server_location block as defined below.
name Changes to this property will trigger replacement. String
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters Map<String,String>
A map of parameters to associate with the Data Factory Dataset.
schemaColumns List<DatasetParquetSchemaColumn>
A schema_column block as defined below.
dataFactoryId
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
linkedServiceName This property is required. string
The name of the Data Factory Linked Service with which to associate the Dataset.
additionalProperties {[key: string]: string}

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations string[]
List of tags that can be used for describing the Data Factory Dataset.
azureBlobFsLocation DatasetParquetAzureBlobFsLocation
An azure_blob_fs_location block as defined below.
azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compressionCodec string
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compressionLevel string
Specifies the compression level. Possible values are Optimal and Fastest.
description string
The description for the Data Factory Dataset.
folder string
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
httpServerLocation DatasetParquetHttpServerLocation
An http_server_location block as defined below.
name Changes to this property will trigger replacement. string
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Dataset.
schemaColumns DatasetParquetSchemaColumn[]
A schema_column block as defined below.
data_factory_id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
linked_service_name This property is required. str
The name of the Data Factory Linked Service with which to associate the Dataset.
additional_properties Mapping[str, str]

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations Sequence[str]
List of tags that can be used for describing the Data Factory Dataset.
azure_blob_fs_location DatasetParquetAzureBlobFsLocationArgs
An azure_blob_fs_location block as defined below.
azure_blob_storage_location DatasetParquetAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compression_codec str
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compression_level str
Specifies the compression level. Possible values are Optimal and Fastest.
description str
The description for the Data Factory Dataset.
folder str
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
http_server_location DatasetParquetHttpServerLocationArgs
An http_server_location block as defined below.
name Changes to this property will trigger replacement. str
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Dataset.
schema_columns Sequence[DatasetParquetSchemaColumnArgs]
A schema_column block as defined below.
dataFactoryId
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
linkedServiceName This property is required. String
The name of the Data Factory Linked Service with which to associate the Dataset.
additionalProperties Map<String>

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations List<String>
List of tags that can be used for describing the Data Factory Dataset.
azureBlobFsLocation Property Map
An azure_blob_fs_location block as defined below.
azureBlobStorageLocation Property Map

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compressionCodec String
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compressionLevel String
Specifies the compression level. Possible values are Optimal and Fastest.
description String
The description for the Data Factory Dataset.
folder String
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
httpServerLocation Property Map
An http_server_location block as defined below.
name Changes to this property will trigger replacement. String
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters Map<String>
A map of parameters to associate with the Data Factory Dataset.
schemaColumns List<Property Map>
A schema_column block as defined below.

Outputs

All input properties are implicitly available as output properties. Additionally, the DatasetParquet resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Id string
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
id string
The provider-assigned unique ID for this managed resource.
id str
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.

Look up Existing DatasetParquet Resource

Get an existing DatasetParquet resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DatasetParquetState, opts?: CustomResourceOptions): DatasetParquet
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        additional_properties: Optional[Mapping[str, str]] = None,
        annotations: Optional[Sequence[str]] = None,
        azure_blob_fs_location: Optional[DatasetParquetAzureBlobFsLocationArgs] = None,
        azure_blob_storage_location: Optional[DatasetParquetAzureBlobStorageLocationArgs] = None,
        compression_codec: Optional[str] = None,
        compression_level: Optional[str] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        folder: Optional[str] = None,
        http_server_location: Optional[DatasetParquetHttpServerLocationArgs] = None,
        linked_service_name: Optional[str] = None,
        name: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        schema_columns: Optional[Sequence[DatasetParquetSchemaColumnArgs]] = None) -> DatasetParquet
func GetDatasetParquet(ctx *Context, name string, id IDInput, state *DatasetParquetState, opts ...ResourceOption) (*DatasetParquet, error)
public static DatasetParquet Get(string name, Input<string> id, DatasetParquetState? state, CustomResourceOptions? opts = null)
public static DatasetParquet get(String name, Output<String> id, DatasetParquetState state, CustomResourceOptions options)
resources:
  _:
    type: azure:datafactory:DatasetParquet
    get:
      id: ${id}
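As a minimal Python sketch, a lookup might look like the following (the ID is a placeholder in the same format as the Import section below):

import pulumi
import pulumi_azure as azure

existing = azure.datafactory.DatasetParquet.get(
    "existing",
    id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example",
)
pulumi.export("existing_dataset_name", existing.name)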
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AdditionalProperties Dictionary<string, string>

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

Annotations List<string>
List of tags that can be used for describing the Data Factory Dataset.
AzureBlobFsLocation DatasetParquetAzureBlobFsLocation
An azure_blob_fs_location block as defined below.
AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

CompressionCodec string
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
CompressionLevel string
Specifies the compression level. Possible values are Optimal and Fastest.
DataFactoryId Changes to this property will trigger replacement. string
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
Description string
The description for the Data Factory Dataset.
Folder string
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
HttpServerLocation DatasetParquetHttpServerLocation
An http_server_location block as defined below.
LinkedServiceName string
The name of the Data Factory Linked Service with which to associate the Dataset.
Name Changes to this property will trigger replacement. string
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Dataset.
SchemaColumns List<DatasetParquetSchemaColumn>
A schema_column block as defined below.
AdditionalProperties map[string]string

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

Annotations []string
List of tags that can be used for describing the Data Factory Dataset.
AzureBlobFsLocation DatasetParquetAzureBlobFsLocationArgs
An azure_blob_fs_location block as defined below.
AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

CompressionCodec string
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
CompressionLevel string
Specifies the compression level. Possible values are Optimal and Fastest.
DataFactoryId Changes to this property will trigger replacement. string
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
Description string
The description for the Data Factory Dataset.
Folder string
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
HttpServerLocation DatasetParquetHttpServerLocationArgs
An http_server_location block as defined below.
LinkedServiceName string
The name of the Data Factory Linked Service with which to associate the Dataset.
Name Changes to this property will trigger replacement. string
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
Parameters map[string]string
A map of parameters to associate with the Data Factory Dataset.
SchemaColumns []DatasetParquetSchemaColumnArgs
A schema_column block as defined below.
additionalProperties Map<String,String>

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations List<String>
List of tags that can be used for describing the Data Factory Dataset.
azureBlobFsLocation DatasetParquetAzureBlobFsLocation
An azure_blob_fs_location block as defined below.
azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compressionCodec String
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compressionLevel String
Specifies the compression level. Possible values are Optimal and Fastest.
dataFactoryId Changes to this property will trigger replacement. String
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
description String
The description for the Data Factory Dataset.
folder String
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
httpServerLocation DatasetParquetHttpServerLocation
An http_server_location block as defined below.
linkedServiceName String
The name of the Data Factory Linked Service with which to associate the Dataset.
name Changes to this property will trigger replacement. String
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters Map<String,String>
A map of parameters to associate with the Data Factory Dataset.
schemaColumns List<DatasetParquetSchemaColumn>
A schema_column block as defined below.
additionalProperties {[key: string]: string}

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations string[]
List of tags that can be used for describing the Data Factory Dataset.
azureBlobFsLocation DatasetParquetAzureBlobFsLocation
An azure_blob_fs_location block as defined below.
azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compressionCodec string
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compressionLevel string
Specifies the compression level. Possible values are Optimal and Fastest.
dataFactoryId Changes to this property will trigger replacement. string
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
description string
The description for the Data Factory Dataset.
folder string
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
httpServerLocation DatasetParquetHttpServerLocation
An http_server_location block as defined below.
linkedServiceName string
The name of the Data Factory Linked Service with which to associate the Dataset.
name Changes to this property will trigger replacement. string
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Dataset.
schemaColumns DatasetParquetSchemaColumn[]
A schema_column block as defined below.
additional_properties Mapping[str, str]

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations Sequence[str]
List of tags that can be used for describing the Data Factory Dataset.
azure_blob_fs_location DatasetParquetAzureBlobFsLocationArgs
An azure_blob_fs_location block as defined below.
azure_blob_storage_location DatasetParquetAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compression_codec str
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compression_level str
Specifies the compression level. Possible values are Optimal and Fastest.
data_factory_id Changes to this property will trigger replacement. str
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
description str
The description for the Data Factory Dataset.
folder str
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
http_server_location DatasetParquetHttpServerLocationArgs
An http_server_location block as defined below.
linked_service_name str
The name of the Data Factory Linked Service with which to associate the Dataset.
name Changes to this property will trigger replacement. str
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Dataset.
schema_columns Sequence[DatasetParquetSchemaColumnArgs]
A schema_column block as defined below.
additionalProperties Map<String>

A map of additional properties to associate with the Data Factory Dataset.

The following locations are supported for a Parquet Dataset:

annotations List<String>
List of tags that can be used for describing the Data Factory Dataset.
azureBlobFsLocation Property Map
An azure_blob_fs_location block as defined below.
azureBlobStorageLocation Property Map

An azure_blob_storage_location block as defined below.

The following arguments are specific to the Parquet Dataset:

compressionCodec String
The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
compressionLevel String
Specifies the compression level. Possible values are Optimal and Fastest.
dataFactoryId Changes to this property will trigger replacement. String
The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource to be created.
description String
The description for the Data Factory Dataset.
folder String
The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
httpServerLocation Property Map
An http_server_location block as defined below.
linkedServiceName String
The name of the Data Factory Linked Service with which to associate the Dataset.
name Changes to this property will trigger replacement. String
Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
parameters Map<String>
A map of parameters to associate with the Data Factory Dataset.
schemaColumns List<Property Map>
A schema_column block as defined below.

Supporting Types

DatasetParquetAzureBlobFsLocation, DatasetParquetAzureBlobFsLocationArgs

DynamicFileSystemEnabled bool
Is the file_system using a dynamic expression, function, or system variables? Defaults to false.
DynamicFilenameEnabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
DynamicPathEnabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
FileSystem string
The container on the Azure Data Lake Storage Account hosting the file.
Filename string
The filename of the file on the Azure Data Lake Storage Account.
Path string
The folder path to the file on the Azure Data Lake Storage Account.
DynamicFileSystemEnabled bool
Is the file_system using a dynamic expression, function, or system variables? Defaults to false.
DynamicFilenameEnabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
DynamicPathEnabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
FileSystem string
The container on the Azure Data Lake Storage Account hosting the file.
Filename string
The filename of the file on the Azure Data Lake Storage Account.
Path string
The folder path to the file on the Azure Data Lake Storage Account.
dynamicFileSystemEnabled Boolean
Is the file_system using a dynamic expression, function, or system variables? Defaults to false.
dynamicFilenameEnabled Boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled Boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
fileSystem String
The container on the Azure Data Lake Storage Account hosting the file.
filename String
The filename of the file on the Azure Data Lake Storage Account.
path String
The folder path to the file on the Azure Data Lake Storage Account.
dynamicFileSystemEnabled boolean
Is the file_system using a dynamic expression, function, or system variables? Defaults to false.
dynamicFilenameEnabled boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
fileSystem string
The container on the Azure Data Lake Storage Account hosting the file.
filename string
The filename of the file on the Azure Data Lake Storage Account.
path string
The folder path to the file on the Azure Data Lake Storage Account.
dynamic_file_system_enabled bool
Is the file_system using a dynamic expression, function, or system variables? Defaults to false.
dynamic_filename_enabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamic_path_enabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
file_system str
The container on the Azure Data Lake Storage Account hosting the file.
filename str
The filename of the file on the Azure Data Lake Storage Account.
path str
The folder path to the file on the Azure Data Lake Storage Account.
dynamicFileSystemEnabled Boolean
Is the file_system using a dynamic expression, function, or system variables? Defaults to false.
dynamicFilenameEnabled Boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled Boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
fileSystem String
The container on the Azure Data Lake Storage Account hosting the file.
filename String
The filename of the file on the Azure Data Lake Storage Account.
path String
The folder path to the file on the Azure Data Lake Storage Account.
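As an illustrative Python sketch (the file system and paths below are assumptions, not values from this page), an azure_blob_fs_location targeting an Azure Data Lake Storage Gen2 account might look like:

import pulumi_azure as azure

fs_location = azure.datafactory.DatasetParquetAzureBlobFsLocationArgs(
    file_system="datalake",    # container on the Data Lake Storage account
    path="curated/sales",      # folder path within the file system
    filename="sales.parquet",  # file to read
)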

DatasetParquetAzureBlobStorageLocation, DatasetParquetAzureBlobStorageLocationArgs

Container This property is required. string
The container on the Azure Blob Storage Account hosting the file.
DynamicContainerEnabled bool
Is the container using a dynamic expression, function, or system variables? Defaults to false.
DynamicFilenameEnabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
DynamicPathEnabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
Filename string
The filename of the file on the Azure Blob Storage Account.
Path string
The folder path to the file on the Azure Blob Storage Account.
Container This property is required. string
The container on the Azure Blob Storage Account hosting the file.
DynamicContainerEnabled bool
Is the container using a dynamic expression, function, or system variables? Defaults to false.
DynamicFilenameEnabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
DynamicPathEnabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
Filename string
The filename of the file on the Azure Blob Storage Account.
Path string
The folder path to the file on the Azure Blob Storage Account.
container This property is required. String
The container on the Azure Blob Storage Account hosting the file.
dynamicContainerEnabled Boolean
Is the container using a dynamic expression, function, or system variables? Defaults to false.
dynamicFilenameEnabled Boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled Boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
filename String
The filename of the file on the Azure Blob Storage Account.
path String
The folder path to the file on the Azure Blob Storage Account.
container This property is required. string
The container on the Azure Blob Storage Account hosting the file.
dynamicContainerEnabled boolean
Is the container using a dynamic expression, function, or system variables? Defaults to false.
dynamicFilenameEnabled boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
filename string
The filename of the file on the Azure Blob Storage Account.
path string
The folder path to the file on the Azure Blob Storage Account.
container This property is required. str
The container on the Azure Blob Storage Account hosting the file.
dynamic_container_enabled bool
Is the container using a dynamic expression, function, or system variables? Defaults to false.
dynamic_filename_enabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamic_path_enabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
filename str
The filename of the file on the Azure Blob Storage Account.
path str
The folder path to the file on the Azure Blob Storage Account.
container This property is required. String
The container on the Azure Blob Storage Account hosting the file.
dynamicContainerEnabled Boolean
Is the container using a dynamic expression, function, or system variables? Defaults to false.
dynamicFilenameEnabled Boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled Boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
filename String
The filename of the file on the Azure Blob Storage Account.
path String
The folder path to the file on the Azure Blob Storage Account.
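The dynamic_*_enabled flags pair with Data Factory expressions. As a sketch (the container name and expression are illustrative assumptions), a blob location whose folder path is computed at runtime might look like this in Python:

import pulumi_azure as azure

blob_location = azure.datafactory.DatasetParquetAzureBlobStorageLocationArgs(
    container="raw",
    # A Data Factory expression; dynamic_path_enabled marks the path as dynamic.
    path="@concat('ingest/', formatDateTime(utcnow(), 'yyyy/MM/dd'))",
    dynamic_path_enabled=True,
    filename="data.parquet",
)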

DatasetParquetHttpServerLocation, DatasetParquetHttpServerLocationArgs

Filename This property is required. string
The filename of the file on the web server.
RelativeUrl This property is required. string
The base URL to the web server hosting the file.
DynamicFilenameEnabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
DynamicPathEnabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
Path string
The folder path to the file on the web server.
Filename This property is required. string
The filename of the file on the web server.
RelativeUrl This property is required. string
The base URL to the web server hosting the file.
DynamicFilenameEnabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
DynamicPathEnabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
Path string
The folder path to the file on the web server.
filename This property is required. String
The filename of the file on the web server.
relativeUrl This property is required. String
The base URL to the web server hosting the file.
dynamicFilenameEnabled Boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled Boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
path String
The folder path to the file on the web server.
filename This property is required. string
The filename of the file on the web server.
relativeUrl This property is required. string
The base URL to the web server hosting the file.
dynamicFilenameEnabled boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
path string
The folder path to the file on the web server.
filename This property is required. str
The filename of the file on the web server.
relative_url This property is required. str
The base URL to the web server hosting the file.
dynamic_filename_enabled bool
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamic_path_enabled bool
Is the path using a dynamic expression, function, or system variables? Defaults to false.
path str
The folder path to the file on the web server.
filename This property is required. String
The filename of the file on the web server.
relativeUrl This property is required. String
The base URL to the web server hosting the file.
dynamicFilenameEnabled Boolean
Is the filename using a dynamic expression, function, or system variables? Defaults to false.
dynamicPathEnabled Boolean
Is the path using a dynamic expression, function, or system variables? Defaults to false.
path String
The folder path to the file on the web server.

DatasetParquetSchemaColumn, DatasetParquetSchemaColumnArgs

Name This property is required. string
The name of the column.
Description string
The description of the column.
Type string
Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case-sensitive.
Name This property is required. string
The name of the column.
Description string
The description of the column.
Type string
Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case-sensitive.
name This property is required. String
The name of the column.
description String
The description of the column.
type String
Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case-sensitive.
name This property is required. string
The name of the column.
description string
The description of the column.
type string
Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case-sensitive.
name This property is required. str
The name of the column.
description str
The description of the column.
type str
Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case-sensitive.
name This property is required. String
The name of the column.
description String
The description of the column.
type String
Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case-sensitive.
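To make the schema_column shape concrete, here is a Python sketch that attaches two illustrative columns to a dataset; it assumes the example_factory and example_linked_service_web resources from Example Usage:

import pulumi_azure as azure

dataset = azure.datafactory.DatasetParquet(
    "schema_example",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location=azure.datafactory.DatasetParquetHttpServerLocationArgs(
        relative_url="http://www.bing.com",
        path="foo/bar/",
        filename="fizz.txt",
    ),
    schema_columns=[
        azure.datafactory.DatasetParquetSchemaColumnArgs(name="id", type="Int32"),
        azure.datafactory.DatasetParquetSchemaColumnArgs(
            name="label",
            type="String",
            description="An illustrative column description",
        ),
    ],
)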

Import

Data Factory Datasets can be imported using the resource ID, e.g.

$ pulumi import azure:datafactory/datasetParquet:DatasetParquet example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
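Alternatively, an existing dataset can be adopted from a Pulumi program by setting the import_ resource option; below is a Python sketch with placeholder values (the arguments, including any location block, must mirror the resource as it exists in Azure):

import pulumi
import pulumi_azure as azure

existing_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example"
adopted = azure.datafactory.DatasetParquet(
    "example",
    name="example",
    data_factory_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example",
    linked_service_name="example",
    opts=pulumi.ResourceOptions(import_=existing_id),
)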

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Azure Classic pulumi/pulumi-azure
License
Apache-2.0
Notes
This Pulumi package is based on the azurerm Terraform Provider.