alicloud.log.Etl
Explore with Pulumi AI
The data transformation feature of the Log Service is a hosted, highly available, and scalable data processing service, widely applicable to scenarios such as data regularization, enrichment, distribution, aggregation, and index reconstruction. Refer to the official Alibaba Cloud Log Service ETL documentation for details.
NOTE: Available since provider version 1.120.0.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";
import * as random from "@pulumi/random";

// Random suffix keeps the (globally unique) log project name collision-free.
const _default = new random.index.Integer("default", {
    max: 99999,
    min: 10000,
});
// `_default.result` is an Output<number>; a plain template literal would render
// it as "[object Object]", so resolve it with pulumi.interpolate.
const example = new alicloud.log.Project("example", {
    name: pulumi.interpolate`terraform-example-${_default.result}`,
    description: "terraform-example",
});
// Source logstore that the ETL job reads from.
const exampleStore = new alicloud.log.Store("example", {
    project: example.name,
    name: "example-store",
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// Target logstore for the first ETL sink.
const example2 = new alicloud.log.Store("example2", {
    project: example.name,
    name: "example-store2",
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// Target logstore for the second ETL sink.
const example3 = new alicloud.log.Store("example3", {
    project: example.name,
    name: "example-store3",
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// ETL job: reads from exampleStore, applies the script, and fans out to the
// two sink logstores. The access-key values here are placeholders.
const exampleEtl = new alicloud.log.Etl("example", {
    etlName: "terraform-example",
    project: example.name,
    displayName: "terraform-example",
    description: "terraform-example",
    accessKeyId: "access_key_id",
    accessKeySecret: "access_key_secret",
    script: "e_set('new','key')",
    logstore: exampleStore.name,
    etlSinks: [
        {
            name: "target_name",
            accessKeyId: "example2_access_key_id",
            accessKeySecret: "example2_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: example.name,
            logstore: example2.name,
        },
        {
            name: "target_name2",
            accessKeyId: "example3_access_key_id",
            accessKeySecret: "example3_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: example.name,
            logstore: example3.name,
        },
    ],
});
import pulumi
import pulumi_alicloud as alicloud
import pulumi_random as random

# Random suffix keeps the (globally unique) log project name collision-free.
default = random.index.Integer("default",
    max=99999,
    min=10000)
# default['result'] is an Output; embedding it directly in an f-string would
# interpolate the Output's repr, not its value — resolve it with .apply().
example = alicloud.log.Project("example",
    name=default["result"].apply(lambda result: f"terraform-example-{result}"),
    description="terraform-example")
# Source logstore that the ETL job reads from.
example_store = alicloud.log.Store("example",
    project=example.name,
    name="example-store",
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# Target logstore for the first ETL sink.
example2 = alicloud.log.Store("example2",
    project=example.name,
    name="example-store2",
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# Target logstore for the second ETL sink.
example3 = alicloud.log.Store("example3",
    project=example.name,
    name="example-store3",
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# ETL job: reads from example_store, applies the script, and fans out to the
# two sink logstores. The access-key values here are placeholders.
example_etl = alicloud.log.Etl("example",
    etl_name="terraform-example",
    project=example.name,
    display_name="terraform-example",
    description="terraform-example",
    access_key_id="access_key_id",
    access_key_secret="access_key_secret",
    script="e_set('new','key')",
    logstore=example_store.name,
    etl_sinks=[
        alicloud.log.EtlEtlSinkArgs(
            name="target_name",
            access_key_id="example2_access_key_id",
            access_key_secret="example2_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=example.name,
            logstore=example2.name,
        ),
        alicloud.log.EtlEtlSinkArgs(
            name="target_name2",
            access_key_id="example3_access_key_id",
            access_key_secret="example3_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=example.name,
            logstore=example3.name,
        ),
    ])
package main
import (
	"fmt"
	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := random.NewInteger(ctx, "default", &random.IntegerArgs{
			Max: 99999,
			Min: 10000,
		})
		if err != nil {
			return err
		}
		example, err := log.NewProject(ctx, "example", &log.ProjectArgs{
			Name:        pulumi.String(fmt.Sprintf("terraform-example-%v", _default.Result)),
			Description: pulumi.String("terraform-example"),
		})
		if err != nil {
			return err
		}
		exampleStore, err := log.NewStore(ctx, "example", &log.StoreArgs{
			Project:            example.Name,
			Name:               pulumi.String("example-store"),
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		example2, err := log.NewStore(ctx, "example2", &log.StoreArgs{
			Project:            example.Name,
			Name:               pulumi.String("example-store2"),
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		example3, err := log.NewStore(ctx, "example3", &log.StoreArgs{
			Project:            example.Name,
			Name:               pulumi.String("example-store3"),
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		_, err = log.NewEtl(ctx, "example", &log.EtlArgs{
			EtlName:         pulumi.String("terraform-example"),
			Project:         example.Name,
			DisplayName:     pulumi.String("terraform-example"),
			Description:     pulumi.String("terraform-example"),
			AccessKeyId:     pulumi.String("access_key_id"),
			AccessKeySecret: pulumi.String("access_key_secret"),
			Script:          pulumi.String("e_set('new','key')"),
			Logstore:        exampleStore.Name,
			EtlSinks: log.EtlEtlSinkArray{
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name"),
					AccessKeyId:     pulumi.String("example2_access_key_id"),
					AccessKeySecret: pulumi.String("example2_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         example.Name,
					Logstore:        example2.Name,
				},
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name2"),
					AccessKeyId:     pulumi.String("example3_access_key_id"),
					AccessKeySecret: pulumi.String("example3_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         example.Name,
					Logstore:        example3.Name,
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AliCloud = Pulumi.AliCloud;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() => 
{
    // Random suffix keeps the (globally unique) log project name collision-free.
    var @default = new Random.Index.Integer("default", new()
    {
        Max = 99999,
        Min = 10000,
    });
    // @default.Result is an Output; a plain interpolated string would embed the
    // Output object itself, so resolve it with Output.Format.
    var example = new AliCloud.Log.Project("example", new()
    {
        Name = Output.Format($"terraform-example-{@default.Result}"),
        Description = "terraform-example",
    });
    // Source logstore that the ETL job reads from.
    var exampleStore = new AliCloud.Log.Store("example", new()
    {
        Project = example.Name,
        Name = "example-store",
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });
    // Target logstore for the first ETL sink.
    var example2 = new AliCloud.Log.Store("example2", new()
    {
        Project = example.Name,
        Name = "example-store2",
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });
    // Target logstore for the second ETL sink.
    var example3 = new AliCloud.Log.Store("example3", new()
    {
        Project = example.Name,
        Name = "example-store3",
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });
    // ETL job: reads from exampleStore, applies the script, and fans out to
    // the two sink logstores. The access-key values here are placeholders.
    var exampleEtl = new AliCloud.Log.Etl("example", new()
    {
        EtlName = "terraform-example",
        Project = example.Name,
        DisplayName = "terraform-example",
        Description = "terraform-example",
        AccessKeyId = "access_key_id",
        AccessKeySecret = "access_key_secret",
        Script = "e_set('new','key')",
        Logstore = exampleStore.Name,
        EtlSinks = new[]
        {
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name",
                AccessKeyId = "example2_access_key_id",
                AccessKeySecret = "example2_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = example.Name,
                Logstore = example2.Name,
            },
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name2",
                AccessKeyId = "example3_access_key_id",
                AccessKeySecret = "example3_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = example.Name,
                Logstore = example3.Name,
            },
        },
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
// Class names are capitalized: `com.pulumi.random.integer` does not exist.
import com.pulumi.random.Integer;
import com.pulumi.random.IntegerArgs;
import com.pulumi.alicloud.log.Project;
import com.pulumi.alicloud.log.ProjectArgs;
import com.pulumi.alicloud.log.Store;
import com.pulumi.alicloud.log.StoreArgs;
import com.pulumi.alicloud.log.Etl;
import com.pulumi.alicloud.log.EtlArgs;
import com.pulumi.alicloud.log.inputs.EtlEtlSinkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Random suffix keeps the (globally unique) log project name collision-free.
        var default_ = new Integer("default", IntegerArgs.builder()
            .max(99999)
            .min(10000)
            .build());
        // result() is an Output; String.format over it would format the Output
        // object, so map the resolved value with applyValue.
        var example = new Project("example", ProjectArgs.builder()
            .name(default_.result().applyValue(result -> String.format("terraform-example-%s", result)))
            .description("terraform-example")
            .build());
        // Source logstore that the ETL job reads from.
        var exampleStore = new Store("exampleStore", StoreArgs.builder()
            .project(example.name())
            .name("example-store")
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());
        // Target logstore for the first ETL sink.
        var example2 = new Store("example2", StoreArgs.builder()
            .project(example.name())
            .name("example-store2")
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());
        // Target logstore for the second ETL sink.
        var example3 = new Store("example3", StoreArgs.builder()
            .project(example.name())
            .name("example-store3")
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());
        // ETL job: reads from exampleStore, applies the script, and fans out
        // to the two sink logstores. The access-key values are placeholders.
        var exampleEtl = new Etl("exampleEtl", EtlArgs.builder()
            .etlName("terraform-example")
            .project(example.name())
            .displayName("terraform-example")
            .description("terraform-example")
            .accessKeyId("access_key_id")
            .accessKeySecret("access_key_secret")
            .script("e_set('new','key')")
            .logstore(exampleStore.name())
            .etlSinks(            
                EtlEtlSinkArgs.builder()
                    .name("target_name")
                    .accessKeyId("example2_access_key_id")
                    .accessKeySecret("example2_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(example.name())
                    .logstore(example2.name())
                    .build(),
                EtlEtlSinkArgs.builder()
                    .name("target_name2")
                    .accessKeyId("example3_access_key_id")
                    .accessKeySecret("example3_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(example.name())
                    .logstore(example3.name())
                    .build())
            .build());
    }
}
resources:
  # Random suffix keeps the (globally unique) log project name collision-free.
  default:
    type: random:integer
    properties:
      max: 99999
      min: 10000
  example:
    type: alicloud:log:Project
    properties:
      name: terraform-example-${default.result}
      description: terraform-example
  # Source logstore that the ETL job reads from.
  exampleStore:
    type: alicloud:log:Store
    name: example
    properties:
      project: ${example.name}
      name: example-store
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # Target logstore for the first ETL sink.
  example2:
    type: alicloud:log:Store
    properties:
      project: ${example.name}
      name: example-store2
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # Target logstore for the second ETL sink.
  example3:
    type: alicloud:log:Store
    properties:
      project: ${example.name}
      name: example-store3
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # ETL job: reads from exampleStore, applies the script, and fans out to the
  # two sink logstores. The access-key values here are placeholders.
  exampleEtl:
    type: alicloud:log:Etl
    name: example
    properties:
      etlName: terraform-example
      project: ${example.name}
      displayName: terraform-example
      description: terraform-example
      accessKeyId: access_key_id
      accessKeySecret: access_key_secret
      script: e_set('new','key')
      logstore: ${exampleStore.name}
      etlSinks:
        - name: target_name
          accessKeyId: example2_access_key_id
          accessKeySecret: example2_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${example.name}
          logstore: ${example2.name}
        - name: target_name2
          accessKeyId: example3_access_key_id
          accessKeySecret: example3_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${example.name}
          logstore: ${example3.name}
Create Etl Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Etl(name: string, args: EtlArgs, opts?: CustomResourceOptions);

@overload
def Etl(resource_name: str,
        args: EtlArgs,
        opts: Optional[ResourceOptions] = None)
@overload
def Etl(resource_name: str,
        opts: Optional[ResourceOptions] = None,
        etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
        script: Optional[str] = None,
        project: Optional[str] = None,
        logstore: Optional[str] = None,
        display_name: Optional[str] = None,
        etl_name: Optional[str] = None,
        kms_encrypted_access_key_secret: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        from_time: Optional[int] = None,
        kms_encrypted_access_key_id: Optional[str] = None,
        access_key_id: Optional[str] = None,
        kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
        kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
        last_modified_time: Optional[int] = None,
        description: Optional[str] = None,
        etl_type: Optional[str] = None,
        create_time: Optional[int] = None,
        role_arn: Optional[str] = None,
        schedule: Optional[str] = None,
        access_key_secret: Optional[str] = None,
        status: Optional[str] = None,
        to_time: Optional[int] = None,
        version: Optional[int] = None)

func NewEtl(ctx *Context, name string, args EtlArgs, opts ...ResourceOption) (*Etl, error)

public Etl(string name, EtlArgs args, CustomResourceOptions? opts = null)

type: alicloud:log:Etl
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
 - The unique name of the resource.
 - args EtlArgs
 - The arguments to resource properties.
 - opts CustomResourceOptions
 - Bag of options to control resource's behavior.
 
- resource_name str
 - The unique name of the resource.
 - args EtlArgs
 - The arguments to resource properties.
 - opts ResourceOptions
 - Bag of options to control resource's behavior.
 
- ctx Context
 - Context object for the current deployment.
 - name string
 - The unique name of the resource.
 - args EtlArgs
 - The arguments to resource properties.
 - opts ResourceOption
 - Bag of options to control resource's behavior.
 
- name string
 - The unique name of the resource.
 - args EtlArgs
 - The arguments to resource properties.
 - opts CustomResourceOptions
 - Bag of options to control resource's behavior.
 
- name String
 - The unique name of the resource.
 - args EtlArgs
 - The arguments to resource properties.
 - options CustomResourceOptions
 - Bag of options to control resource's behavior.
 
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example: every value below is a type-appropriate placeholder,
// not a working configuration.
var etlResource = new AliCloud.Log.Etl("etlResource", new()
{
    EtlSinks = new[]
    {
        new AliCloud.Log.Inputs.EtlEtlSinkArgs
        {
            Endpoint = "string",
            Logstore = "string",
            Name = "string",
            Project = "string",
            AccessKeyId = "string",
            AccessKeySecret = "string",
            KmsEncryptedAccessKeyId = "string",
            KmsEncryptedAccessKeySecret = "string",
            RoleArn = "string",
            Type = "string",
        },
    },
    Script = "string",
    Project = "string",
    Logstore = "string",
    DisplayName = "string",
    EtlName = "string",
    KmsEncryptedAccessKeySecret = "string",
    Parameters = 
    {
        { "string", "string" },
    },
    FromTime = 0,
    KmsEncryptedAccessKeyId = "string",
    AccessKeyId = "string",
    KmsEncryptionAccessKeyIdContext = 
    {
        { "string", "any" },
    },
    KmsEncryptionAccessKeySecretContext = 
    {
        { "string", "any" },
    },
    LastModifiedTime = 0,
    Description = "string",
    EtlType = "string",
    CreateTime = 0,
    RoleArn = "string",
    Schedule = "string",
    AccessKeySecret = "string",
    Status = "string",
    ToTime = 0,
    Version = 0,
});
// Reference example: every value below is a type-appropriate placeholder,
// not a working configuration.
example, err := log.NewEtl(ctx, "etlResource", &log.EtlArgs{
	EtlSinks: log.EtlEtlSinkArray{
		&log.EtlEtlSinkArgs{
			Endpoint:                    pulumi.String("string"),
			Logstore:                    pulumi.String("string"),
			Name:                        pulumi.String("string"),
			Project:                     pulumi.String("string"),
			AccessKeyId:                 pulumi.String("string"),
			AccessKeySecret:             pulumi.String("string"),
			KmsEncryptedAccessKeyId:     pulumi.String("string"),
			KmsEncryptedAccessKeySecret: pulumi.String("string"),
			RoleArn:                     pulumi.String("string"),
			Type:                        pulumi.String("string"),
		},
	},
	Script:                      pulumi.String("string"),
	Project:                     pulumi.String("string"),
	Logstore:                    pulumi.String("string"),
	DisplayName:                 pulumi.String("string"),
	EtlName:                     pulumi.String("string"),
	KmsEncryptedAccessKeySecret: pulumi.String("string"),
	Parameters: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	FromTime:                pulumi.Int(0),
	KmsEncryptedAccessKeyId: pulumi.String("string"),
	AccessKeyId:             pulumi.String("string"),
	KmsEncryptionAccessKeyIdContext: pulumi.Map{
		"string": pulumi.Any("any"),
	},
	KmsEncryptionAccessKeySecretContext: pulumi.Map{
		"string": pulumi.Any("any"),
	},
	LastModifiedTime: pulumi.Int(0),
	Description:      pulumi.String("string"),
	EtlType:          pulumi.String("string"),
	CreateTime:       pulumi.Int(0),
	RoleArn:          pulumi.String("string"),
	Schedule:         pulumi.String("string"),
	AccessKeySecret:  pulumi.String("string"),
	Status:           pulumi.String("string"),
	ToTime:           pulumi.Int(0),
	Version:          pulumi.Int(0),
})
// Reference example: every value below is a type-appropriate placeholder,
// not a working configuration.
var etlResource = new Etl("etlResource", EtlArgs.builder()
    .etlSinks(EtlEtlSinkArgs.builder()
        .endpoint("string")
        .logstore("string")
        .name("string")
        .project("string")
        .accessKeyId("string")
        .accessKeySecret("string")
        .kmsEncryptedAccessKeyId("string")
        .kmsEncryptedAccessKeySecret("string")
        .roleArn("string")
        .type("string")
        .build())
    .script("string")
    .project("string")
    .logstore("string")
    .displayName("string")
    .etlName("string")
    .kmsEncryptedAccessKeySecret("string")
    .parameters(Map.of("string", "string"))
    .fromTime(0)
    .kmsEncryptedAccessKeyId("string")
    .accessKeyId("string")
    .kmsEncryptionAccessKeyIdContext(Map.of("string", "any"))
    .kmsEncryptionAccessKeySecretContext(Map.of("string", "any"))
    .lastModifiedTime(0)
    .description("string")
    .etlType("string")
    .createTime(0)
    .roleArn("string")
    .schedule("string")
    .accessKeySecret("string")
    .status("string")
    .toTime(0)
    .version(0)
    .build());
# Reference example: every value below is a type-appropriate placeholder,
# not a working configuration.
etl_resource = alicloud.log.Etl("etlResource",
    etl_sinks=[alicloud.log.EtlEtlSinkArgs(
        endpoint="string",
        logstore="string",
        name="string",
        project="string",
        access_key_id="string",
        access_key_secret="string",
        kms_encrypted_access_key_id="string",
        kms_encrypted_access_key_secret="string",
        role_arn="string",
        type="string",
    )],
    script="string",
    project="string",
    logstore="string",
    display_name="string",
    etl_name="string",
    kms_encrypted_access_key_secret="string",
    parameters={
        "string": "string",
    },
    from_time=0,
    kms_encrypted_access_key_id="string",
    access_key_id="string",
    kms_encryption_access_key_id_context={
        "string": "any",
    },
    kms_encryption_access_key_secret_context={
        "string": "any",
    },
    last_modified_time=0,
    description="string",
    etl_type="string",
    create_time=0,
    role_arn="string",
    schedule="string",
    access_key_secret="string",
    status="string",
    to_time=0,
    version=0)
// Reference example: every value below is a type-appropriate placeholder,
// not a working configuration.
const etlResource = new alicloud.log.Etl("etlResource", {
    etlSinks: [{
        endpoint: "string",
        logstore: "string",
        name: "string",
        project: "string",
        accessKeyId: "string",
        accessKeySecret: "string",
        kmsEncryptedAccessKeyId: "string",
        kmsEncryptedAccessKeySecret: "string",
        roleArn: "string",
        type: "string",
    }],
    script: "string",
    project: "string",
    logstore: "string",
    displayName: "string",
    etlName: "string",
    kmsEncryptedAccessKeySecret: "string",
    parameters: {
        string: "string",
    },
    fromTime: 0,
    kmsEncryptedAccessKeyId: "string",
    accessKeyId: "string",
    kmsEncryptionAccessKeyIdContext: {
        string: "any",
    },
    kmsEncryptionAccessKeySecretContext: {
        string: "any",
    },
    lastModifiedTime: 0,
    description: "string",
    etlType: "string",
    createTime: 0,
    roleArn: "string",
    schedule: "string",
    accessKeySecret: "string",
    status: "string",
    toTime: 0,
    version: 0,
});
# Reference example: every value below is a type-appropriate placeholder,
# not a working configuration.
type: alicloud:log:Etl
properties:
    accessKeyId: string
    accessKeySecret: string
    createTime: 0
    description: string
    displayName: string
    etlName: string
    etlSinks:
        - accessKeyId: string
          accessKeySecret: string
          endpoint: string
          kmsEncryptedAccessKeyId: string
          kmsEncryptedAccessKeySecret: string
          logstore: string
          name: string
          project: string
          roleArn: string
          type: string
    etlType: string
    fromTime: 0
    kmsEncryptedAccessKeyId: string
    kmsEncryptedAccessKeySecret: string
    kmsEncryptionAccessKeyIdContext:
        string: any
    kmsEncryptionAccessKeySecretContext:
        string: any
    lastModifiedTime: 0
    logstore: string
    parameters:
        string: string
    project: string
    roleArn: string
    schedule: string
    script: string
    status: string
    toTime: 0
    version: 0
Etl Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Etl resource accepts the following input properties:
- DisplayName string - Log service etl job alias.
- EtlName string - The name of the log etl job.
- EtlSinks List&lt;Pulumi.AliCloud.Log.Inputs.EtlEtlSink&gt; - Target logstore configuration for delivery after data processing.
 - Logstore string
 - The source logstore of the processing job.
 - Project string
 - The name of the project where the etl job is located.
 - Script string
 - Processing operation grammar.
- AccessKeyId string - Source logstore access key id.
- AccessKeySecret string - Source logstore access key secret.
 - Create
Time int - The etl job create time.
 - Description string
 - Description of the log etl job.
 - Etl
Type string - Log service etl type, the default value is 
ETL. - From
Time int - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - Kms
Encrypted stringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - Kms
Encrypted stringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - Kms
Encryption Dictionary<string, object>Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Kms
Encryption Dictionary<string, object>Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Last
Modified intTime  - ETL job last modified time.
 - Parameters Dictionary<string, string>
 - Advanced parameter configuration of processing operations.
 - Role
Arn string - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - Schedule string
 - Job scheduling type, the default value is Resident.
 - Status string
 - Log project tags. the default value is RUNNING, Only 4 values are supported: 
STARTING,RUNNING,STOPPING,STOPPED. - To
Time int - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - Version int
 - Log etl job version. the default value is 
2. 
- Display
Name string - Log service etl job alias.
 - Etl
Name string - The name of the log etl job.
 - Etl
Sinks []EtlEtl Sink Args  - Target logstore configuration for delivery after data processing.
 - Logstore string
 - The source logstore of the processing job.
 - Project string
 - The name of the project where the etl job is located.
 - Script string
 - Processing operation grammar.
 - Access
Key stringId  - Source logstore access key id.
 - Access
Key stringSecret  - Source logstore access key secret.
 - Create
Time int - The etl job create time.
 - Description string
 - Description of the log etl job.
 - Etl
Type string - Log service etl type, the default value is 
ETL. - From
Time int - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - Kms
Encrypted stringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - Kms
Encrypted stringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - Kms
Encryption map[string]interface{}Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Kms
Encryption map[string]interface{}Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Last
Modified intTime  - ETL job last modified time.
 - Parameters map[string]string
 - Advanced parameter configuration of processing operations.
 - Role
Arn string - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - Schedule string
 - Job scheduling type, the default value is Resident.
 - Status string
 - Log project tags. the default value is RUNNING, Only 4 values are supported: 
STARTING,RUNNING,STOPPING,STOPPED. - To
Time int - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - Version int
 - Log etl job version. the default value is 
2. 
- display
Name String - Log service etl job alias.
 - etl
Name String - The name of the log etl job.
 - etl
Sinks List<EtlEtl Sink>  - Target logstore configuration for delivery after data processing.
 - logstore String
 - The source logstore of the processing job.
 - project String
 - The name of the project where the etl job is located.
 - script String
 - Processing operation grammar.
 - access
Key StringId  - Source logstore access key id.
 - access
Key StringSecret  - Source logstore access key secret.
 - create
Time Integer - The etl job create time.
 - description String
 - Description of the log etl job.
 - etl
Type String - Log service etl type, the default value is 
ETL. - from
Time Integer - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms
Encrypted StringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - kms
Encrypted StringAccess Key Secret  - A KMS-encrypted access key secret used by the log etl job. If the 
access_key_secret is filled in, this field will be ignored. - kms
Encryption Map<String,Object>Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect. - kms
Encryption Map<String,Object>Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect. - last
Modified IntegerTime  - ETL job last modified time.
 - parameters Map<String,String>
 - Advanced parameter configuration of processing operations.
 - role
Arn String - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - schedule String
 - Job scheduling type, the default value is Resident.
 - status String
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - to
Time Integer - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version Integer
 - Log etl job version. the default value is 
2. 
- display
Name string - Log service etl job alias.
 - etl
Name string - The name of the log etl job.
 - etl
Sinks EtlEtl Sink[]  - Target logstore configuration for delivery after data processing.
 - logstore string
 - The source logstore of the processing job.
 - project string
 - The name of the project where the etl job is located.
 - script string
 - Processing operation grammar.
 - access
Key stringId  - Source logstore access key id.
 - access
Key stringSecret  - Source logstore access key secret.
 - create
Time number - The etl job create time.
 - description string
 - Description of the log etl job.
 - etl
Type string - Log service etl type, the default value is 
ETL. - from
Time number - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms
Encrypted stringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - kms
Encrypted stringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - kms
Encryption {[key: string]: any}Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - kms
Encryption {[key: string]: any}Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - last
Modified numberTime  - ETL job last modified time.
 - parameters {[key: string]: string}
 - Advanced parameter configuration of processing operations.
 - role
Arn string - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - schedule string
 - Job scheduling type, the default value is Resident.
 - status string
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - to
Time number - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version number
 - Log etl job version. the default value is 
2. 
- display_
name str - Log service etl job alias.
 - etl_
name str - The name of the log etl job.
 - etl_
sinks Sequence[EtlEtl Sink Args]  - Target logstore configuration for delivery after data processing.
 - logstore str
 - The source logstore of the processing job.
 - project str
 - The name of the project where the etl job is located.
 - script str
 - Processing operation grammar.
 - access_
key_ strid  - Source logstore access key id.
 - access_
key_ strsecret  - Source logstore access key secret.
 - create_
time int - The etl job create time.
 - description str
 - Description of the log etl job.
 - etl_
type str - Log service etl type, the default value is 
ETL. - from_
time int - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms_
encrypted_ straccess_ key_ id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - kms_
encrypted_ straccess_ key_ secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - kms_
encryption_ Mapping[str, Any]access_ key_ id_ context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - kms_
encryption_ Mapping[str, Any]access_ key_ secret_ context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - last_
modified_ inttime  - ETL job last modified time.
 - parameters Mapping[str, str]
 - Advanced parameter configuration of processing operations.
 - role_
arn str - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - schedule str
 - Job scheduling type, the default value is Resident.
 - status str
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - to_
time int - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version int
 - Log etl job version. the default value is 
2. 
- display
Name String - Log service etl job alias.
 - etl
Name String - The name of the log etl job.
 - etl
Sinks List<Property Map> - Target logstore configuration for delivery after data processing.
 - logstore String
 - The source logstore of the processing job.
 - project String
 - The name of the project where the etl job is located.
 - script String
 - Processing operation grammar.
 - access
Key StringId  - Source logstore access key id.
 - access
Key StringSecret  - Source logstore access key secret.
 - create
Time Number - The etl job create time.
 - description String
 - Description of the log etl job.
 - etl
Type String - Log service etl type, the default value is 
ETL. - from
Time Number - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms
Encrypted StringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - kms
Encrypted StringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - kms
Encryption Map<Any>Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - kms
Encryption Map<Any>Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - last
Modified NumberTime  - ETL job last modified time.
 - parameters Map<String>
 - Advanced parameter configuration of processing operations.
 - role
Arn String - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - schedule String
 - Job scheduling type, the default value is Resident.
 - status String
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - to
Time Number - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version Number
 - Log etl job version. the default value is 
2. 
Outputs
All input properties are implicitly available as output properties. Additionally, the Etl resource produces the following output properties:
- Id string
 - The provider-assigned unique ID for this managed resource.
 
- Id string
 - The provider-assigned unique ID for this managed resource.
 
- id String
 - The provider-assigned unique ID for this managed resource.
 
- id string
 - The provider-assigned unique ID for this managed resource.
 
- id str
 - The provider-assigned unique ID for this managed resource.
 
- id String
 - The provider-assigned unique ID for this managed resource.
 
Look up Existing Etl Resource
Get an existing Etl resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: EtlState, opts?: CustomResourceOptions): Etl@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        access_key_id: Optional[str] = None,
        access_key_secret: Optional[str] = None,
        create_time: Optional[int] = None,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        etl_name: Optional[str] = None,
        etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
        etl_type: Optional[str] = None,
        from_time: Optional[int] = None,
        kms_encrypted_access_key_id: Optional[str] = None,
        kms_encrypted_access_key_secret: Optional[str] = None,
        kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
        kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
        last_modified_time: Optional[int] = None,
        logstore: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        project: Optional[str] = None,
        role_arn: Optional[str] = None,
        schedule: Optional[str] = None,
        script: Optional[str] = None,
        status: Optional[str] = None,
        to_time: Optional[int] = None,
        version: Optional[int] = None) -> Etlfunc GetEtl(ctx *Context, name string, id IDInput, state *EtlState, opts ...ResourceOption) (*Etl, error)public static Etl Get(string name, Input<string> id, EtlState? state, CustomResourceOptions? opts = null)public static Etl get(String name, Output<String> id, EtlState state, CustomResourceOptions options)Resource lookup is not supported in YAML- name
 - The unique name of the resulting resource.
 - id
 - The unique provider ID of the resource to lookup.
 - state
 - Any extra arguments used during the lookup.
 - opts
 - A bag of options that control this resource's behavior.
 
- resource_name
 - The unique name of the resulting resource.
 - id
 - The unique provider ID of the resource to lookup.
 
- name
 - The unique name of the resulting resource.
 - id
 - The unique provider ID of the resource to lookup.
 - state
 - Any extra arguments used during the lookup.
 - opts
 - A bag of options that control this resource's behavior.
 
- name
 - The unique name of the resulting resource.
 - id
 - The unique provider ID of the resource to lookup.
 - state
 - Any extra arguments used during the lookup.
 - opts
 - A bag of options that control this resource's behavior.
 
- name
 - The unique name of the resulting resource.
 - id
 - The unique provider ID of the resource to lookup.
 - state
 - Any extra arguments used during the lookup.
 - opts
 - A bag of options that control this resource's behavior.
 
- Access
Key stringId  - Source logstore access key id.
 - Access
Key stringSecret  - Source logstore access key secret.
 - Create
Time int - The etl job create time.
 - Description string
 - Description of the log etl job.
 - Display
Name string - Log service etl job alias.
 - Etl
Name string - The name of the log etl job.
 - Etl
Sinks List<Pulumi.Ali Cloud. Log. Inputs. Etl Etl Sink>  - Target logstore configuration for delivery after data processing.
 - Etl
Type string - Log service etl type, the default value is 
ETL. - From
Time int - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - Kms
Encrypted stringAccess Key Id  - A KMS-encrypted access key id used by the log etl job. If the 
access_key_id is filled in, this field will be ignored. - Kms
Encrypted stringAccess Key Secret  - A KMS-encrypted access key secret used by the log etl job. If the 
access_key_secret is filled in, this field will be ignored. - Kms
Encryption Dictionary<string, object>Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Kms
Encryption Dictionary<string, object>Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Last
Modified intTime  - ETL job last modified time.
 - Logstore string
 - The source logstore of the processing job.
 - Parameters Dictionary<string, string>
 - Advanced parameter configuration of processing operations.
 - Project string
 - The name of the project where the etl job is located.
 - Role
Arn string - Sts role info under source logstore. 
role_arn and (access_key_id, access_key_secret): fill in at most one of the two. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair. - Schedule string
 - Job scheduling type, the default value is Resident.
 - Script string
 - Processing operation grammar.
 - Status string
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - To
Time int - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - Version int
 - Log etl job version. the default value is 
2. 
- Access
Key stringId  - Source logstore access key id.
 - Access
Key stringSecret  - Source logstore access key secret.
 - Create
Time int - The etl job create time.
 - Description string
 - Description of the log etl job.
 - Display
Name string - Log service etl job alias.
 - Etl
Name string - The name of the log etl job.
 - Etl
Sinks []EtlEtl Sink Args  - Target logstore configuration for delivery after data processing.
 - Etl
Type string - Log service etl type, the default value is 
ETL. - From
Time int - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - Kms
Encrypted stringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - Kms
Encrypted stringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - Kms
Encryption map[string]interface{}Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Kms
Encryption map[string]interface{}Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - Last
Modified intTime  - ETL job last modified time.
 - Logstore string
 - The source logstore of the processing job.
 - Parameters map[string]string
 - Advanced parameter configuration of processing operations.
 - Project string
 - The name of the project where the etl job is located.
 - Role
Arn string - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - Schedule string
 - Job scheduling type, the default value is Resident.
 - Script string
 - Processing operation grammar.
 - Status string
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - To
Time int - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - Version int
 - Log etl job version. the default value is 
2. 
- access
Key StringId  - Source logstore access key id.
 - access
Key StringSecret  - Source logstore access key secret.
 - create
Time Integer - The etl job create time.
 - description String
 - Description of the log etl job.
 - display
Name String - Log service etl job alias.
 - etl
Name String - The name of the log etl job.
 - etl
Sinks List<EtlEtl Sink>  - Target logstore configuration for delivery after data processing.
 - etl
Type String - Log service etl type, the default value is 
ETL. - from
Time Integer - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms
Encrypted StringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - kms
Encrypted StringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - kms
Encryption Map<String,Object>Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - kms
Encryption Map<String,Object>Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - last
Modified IntegerTime  - ETL job last modified time.
 - logstore String
 - The source logstore of the processing job.
 - parameters Map<String,String>
 - Advanced parameter configuration of processing operations.
 - project String
 - The name of the project where the etl job is located.
 - role
Arn String - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - schedule String
 - Job scheduling type, the default value is Resident.
 - script String
 - Processing operation grammar.
 - status String
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - to
Time Integer - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version Integer
 - Log etl job version. the default value is 
2. 
- access
Key stringId  - Source logstore access key id.
 - access
Key stringSecret  - Source logstore access key secret.
 - create
Time number - The etl job create time.
 - description string
 - Description of the log etl job.
 - display
Name string - Log service etl job alias.
 - etl
Name string - The name of the log etl job.
 - etl
Sinks EtlEtl Sink[]  - Target logstore configuration for delivery after data processing.
 - etl
Type string - Log service etl type, the default value is 
ETL. - from
Time number - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms
Encrypted stringAccess Key Id  - An KMS encrypts access key id used to a log etl job. If the 
access_key_idis filled in, this field will be ignored. - kms
Encrypted stringAccess Key Secret  - An KMS encrypts access key secret used to a log etl job. If the 
access_key_secretis filled in, this field will be ignored. - kms
Encryption {[key: string]: any}Access Key Id Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_idbefore creating or updating an instance withkms_encrypted_access_key_id. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - kms
Encryption {[key: string]: any}Access Key Secret Context  - An KMS encryption context used to decrypt 
kms_encrypted_access_key_secretbefore creating or updating an instance withkms_encrypted_access_key_secret. See Encryption Context. It is valid whenkms_encrypted_passwordis set. When it is changed, the instance will reboot to make the change take effect. - last
Modified numberTime  - ETL job last modified time.
 - logstore string
 - The source logstore of the processing job.
 - parameters {[key: string]: string}
 - Advanced parameter configuration of processing operations.
 - project string
 - The name of the project where the etl job is located.
 - role
Arn string - Sts role info under source logstore. 
role_arnand(access_key_id, access_key_secret)fill in at most one. If you do not fill in both, then you must fill in(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)to use KMS to get the key pair. - schedule string
 - Job scheduling type, the default value is Resident.
 - script string
 - Processing operation grammar.
 - status string
 - Status of the log etl job. The default value is RUNNING. Only 4 values are supported: 
STARTING, RUNNING, STOPPING, STOPPED. - to
Time number - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version number
 - Log etl job version. the default value is 
2. 
- access_
key_ strid  - Source logstore access key id.
 - access_
key_ strsecret  - Source logstore access key secret.
 - create_
time int - The etl job create time.
 - description str
 - Description of the log etl job.
 - display_name str
 - Log service etl job alias.
 - etl_name str
 - The name of the log etl job.
 - etl_sinks Sequence[EtlEtlSinkArgs]
 - Target logstore configuration for delivery after data processing.
 - etl_type str
 - Log service etl type, the default value is ETL.
 - from_time int
 - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kms_encrypted_access_key_id str
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - kms_encrypted_access_key_secret str
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - kms_encryption_access_key_id_context Mapping[str, Any]
 - A KMS encryption context used to decrypt `kms_encrypted_access_key_id` before creating or updating an instance with `kms_encrypted_access_key_id`. See Encryption Context. It is valid when `kms_encrypted_password` is set. When it is changed, the instance will reboot to make the change take effect.
 - kms_encryption_access_key_secret_context Mapping[str, Any]
 - A KMS encryption context used to decrypt `kms_encrypted_access_key_secret` before creating or updating an instance with `kms_encrypted_access_key_secret`. See Encryption Context. It is valid when `kms_encrypted_password` is set. When it is changed, the instance will reboot to make the change take effect.
 - last_modified_time int
 - ETL job last modified time.
 - logstore str
 - The source logstore of the processing job.
 - parameters Mapping[str, str]
 - Advanced parameter configuration of processing operations.
 - project str
 - The name of the project where the etl job is located.
 - role_arn str
 - STS role info under source logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - schedule str
 - Job scheduling type, the default value is Resident.
 - script str
 - Processing operation grammar.
 - status str
 - The status of the ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
 - to_time int
 - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version int
 - Log etl job version. The default value is 2.
 - accessKeyId String
 - Source logstore access key id.
 - accessKeySecret String
 - Source logstore access key secret.
 - createTime Number
 - The etl job create time.
 - description String
 - Description of the log etl job.
 - displayName String
 - Log service etl job alias.
 - etlName String
 - The name of the log etl job.
 - etlSinks List<Property Map>
 - Target logstore configuration for delivery after data processing.
 - etlType String
 - Log service etl type, the default value is ETL.
 - fromTime Number
 - The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
 - kmsEncryptedAccessKeyId String
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - kmsEncryptedAccessKeySecret String
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - kmsEncryptionAccessKeyIdContext Map<Any>
 - A KMS encryption context used to decrypt `kms_encrypted_access_key_id` before creating or updating an instance with `kms_encrypted_access_key_id`. See Encryption Context. It is valid when `kms_encrypted_password` is set. When it is changed, the instance will reboot to make the change take effect.
 - kmsEncryptionAccessKeySecretContext Map<Any>
 - A KMS encryption context used to decrypt `kms_encrypted_access_key_secret` before creating or updating an instance with `kms_encrypted_access_key_secret`. See Encryption Context. It is valid when `kms_encrypted_password` is set. When it is changed, the instance will reboot to make the change take effect.
 - lastModifiedTime Number
 - ETL job last modified time.
 - logstore String
 - The source logstore of the processing job.
 - parameters Map<String>
 - Advanced parameter configuration of processing operations.
 - project String
 - The name of the project where the etl job is located.
 - roleArn String
 - STS role info under source logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - schedule String
 - Job scheduling type, the default value is Resident.
 - script String
 - Processing operation grammar.
 - status String
 - The status of the ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
 - toTime Number
 - Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
 - version Number
 - Log etl job version. The default value is 2.
Supporting Types
EtlEtlSink, EtlEtlSinkArgs      
- Endpoint string
 - Delivery target logstore region.
 - Logstore string
 - Delivery target logstore.
 - Name string
 - Delivery target name.
 - Project string
 - The project where the target logstore is delivered.
 - AccessKeyId string
 - Delivery target logstore access key id.
 - AccessKeySecret string
 - Delivery target logstore access key secret.
 - KmsEncryptedAccessKeyId string
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - KmsEncryptedAccessKeySecret string
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - RoleArn string
 - STS role info under delivery target logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - Type string
 - ETL sinks type, the default value is AliyunLOG.
Note:
`from_time` and `to_time` cannot be modified after successful creation.
- Endpoint string
 - Delivery target logstore region.
 - Logstore string
 - Delivery target logstore.
 - Name string
 - Delivery target name.
 - Project string
 - The project where the target logstore is delivered.
 - AccessKeyId string
 - Delivery target logstore access key id.
 - AccessKeySecret string
 - Delivery target logstore access key secret.
 - KmsEncryptedAccessKeyId string
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - KmsEncryptedAccessKeySecret string
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - RoleArn string
 - STS role info under delivery target logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - Type string
 - ETL sinks type, the default value is AliyunLOG.
Note:
`from_time` and `to_time` cannot be modified after successful creation.
- endpoint String
 - Delivery target logstore region.
 - logstore String
 - Delivery target logstore.
 - name String
 - Delivery target name.
 - project String
 - The project where the target logstore is delivered.
 - accessKeyId String
 - Delivery target logstore access key id.
 - accessKeySecret String
 - Delivery target logstore access key secret.
 - kmsEncryptedAccessKeyId String
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - kmsEncryptedAccessKeySecret String
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - roleArn String
 - STS role info under delivery target logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - type String
 - ETL sinks type, the default value is AliyunLOG.
Note:
`from_time` and `to_time` cannot be modified after successful creation.
- endpoint string
 - Delivery target logstore region.
 - logstore string
 - Delivery target logstore.
 - name string
 - Delivery target name.
 - project string
 - The project where the target logstore is delivered.
 - accessKeyId string
 - Delivery target logstore access key id.
 - accessKeySecret string
 - Delivery target logstore access key secret.
 - kmsEncryptedAccessKeyId string
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - kmsEncryptedAccessKeySecret string
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - roleArn string
 - STS role info under delivery target logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - type string
 - ETL sinks type, the default value is AliyunLOG.
Note:
`from_time` and `to_time` cannot be modified after successful creation.
- endpoint str
 - Delivery target logstore region.
 - logstore str
 - Delivery target logstore.
 - name str
 - Delivery target name.
 - project str
 - The project where the target logstore is delivered.
 - access_key_id str
 - Delivery target logstore access key id.
 - access_key_secret str
 - Delivery target logstore access key secret.
 - kms_encrypted_access_key_id str
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - kms_encrypted_access_key_secret str
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - role_arn str
 - STS role info under delivery target logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - type str
 - ETL sinks type, the default value is AliyunLOG.
Note:
`from_time` and `to_time` cannot be modified after successful creation.
- endpoint String
 - Delivery target logstore region.
 - logstore String
 - Delivery target logstore.
 - name String
 - Delivery target name.
 - project String
 - The project where the target logstore is delivered.
 - accessKeyId String
 - Delivery target logstore access key id.
 - accessKeySecret String
 - Delivery target logstore access key secret.
 - kmsEncryptedAccessKeyId String
 - A KMS encrypted access key id used for the log etl job. If `access_key_id` is filled in, this field will be ignored.
 - kmsEncryptedAccessKeySecret String
 - A KMS encrypted access key secret used for the log etl job. If `access_key_secret` is filled in, this field will be ignored.
 - roleArn String
 - STS role info under delivery target logstore. `role_arn` and `(access_key_id, access_key_secret)` fill in at most one. If you do not fill in both, then you must fill in `(kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context)` to use KMS to get the key pair.
 - type String
 - ETL sinks type, the default value is AliyunLOG.
Note:
`from_time` and `to_time` cannot be modified after successful creation.
Import
Log etl can be imported using the id, e.g.
$ pulumi import alicloud:log/etl:Etl example tf-log-project:tf-log-etl-name
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
 - Alibaba Cloud pulumi/pulumi-alicloud
 - License
 - Apache-2.0
 - Notes
 - This Pulumi package is based on the `alicloud` Terraform Provider.