airbyte.DestinationSnowflakeCortex
Explore with Pulumi AI
DestinationSnowflakeCortex Resource
Example Usage
Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.airbyte.DestinationSnowflakeCortex;
import com.pulumi.airbyte.DestinationSnowflakeCortexArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationIndexingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs;
import com.pulumi.core.Output;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    /**
     * Declares an airbyte {@code DestinationSnowflakeCortex} destination,
     * populating the three configuration sections: {@code embedding} (how text
     * is vectorized), {@code indexing} (Snowflake connection details) and
     * {@code processing} (chunking and field mapping).
     *
     * @param ctx the Pulumi deployment context supplied by {@link Pulumi#run}
     */
    public static void stack(Context ctx) {
        var myDestinationSnowflakecortex = new DestinationSnowflakeCortex("myDestinationSnowflakecortex", DestinationSnowflakeCortexArgs.builder()
            .configuration(DestinationSnowflakeCortexConfigurationArgs.builder()
                // Embedding providers: each variant below is one selectable mode.
                .embedding(DestinationSnowflakeCortexConfigurationEmbeddingArgs.builder()
                    .azureOpenAi(DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs.builder()
                        .apiBase("https://your-resource-name.openai.azure.com")
                        .deployment("your-resource-name")
                        .openaiKey("...my_openai_key...")
                        .build())
                    .cohere(DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs.builder()
                        .cohereKey("...my_cohere_key...")
                        .build())
                    // Fake embeddings take no settings; pass an empty args object
                    // (bare .fake() is not a builder method — the FakeArgs type
                    // imported above is the intended argument, cf. `fake: {}` in YAML).
                    .fake(DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs.builder()
                        .build())
                    .openAi(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs.builder()
                        .openaiKey("...my_openai_key...")
                        .build())
                    .openAiCompatible(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs.builder()
                        .apiKey("...my_api_key...")
                        .baseUrl("https://your-service-name.com")
                        .dimensions(1536)
                        .modelName("text-embedding-ada-002")
                        .build())
                    .build())
                // Snowflake connection / destination table settings.
                .indexing(DestinationSnowflakeCortexConfigurationIndexingArgs.builder()
                    .credentials(DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs.builder()
                        .password("AIRBYTE_PASSWORD")
                        .build())
                    .database("AIRBYTE_DATABASE")
                    .defaultSchema("AIRBYTE_SCHEMA")
                    .host("AIRBYTE_ACCOUNT")
                    .role("AIRBYTE_ROLE")
                    .username("AIRBYTE_USER")
                    .warehouse("AIRBYTE_WAREHOUSE")
                    .build())
                // Java builder methods are camelCase (omit_raw_text is the wire/YAML
                // name only — see the reference example's .omitRawText usage).
                .omitRawText(true)
                // Document chunking and field-selection rules.
                .processing(DestinationSnowflakeCortexConfigurationProcessingArgs.builder()
                    .chunkOverlap(3)
                    .chunkSize(6147)
                    .fieldNameMappings(DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs.builder()
                        .fromField("...my_from_field...")
                        .toField("...my_to_field...")
                        .build())
                    .metadataFields("...")
                    .textFields("...")
                    .textSplitter(DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs.builder()
                        .byMarkdownHeader(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs.builder()
                            .splitLevel(3)
                            .build())
                        .byProgrammingLanguage(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs.builder()
                            .language("rst")
                            .build())
                        .bySeparator(DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs.builder()
                            .keepSeparator(true)
                            .separators("...")
                            .build())
                        .build())
                    .build())
                .build())
            .definitionId("4e970f65-b8a4-4398-b19e-2a5644731a72")
            .workspaceId("d33dd7fd-91b5-4245-9a6e-0c987c8003c9")
            .build());
    }
}
resources:
  myDestinationSnowflakecortex:
    type: airbyte:DestinationSnowflakeCortex
    properties:
      configuration:
        embedding:
          azureOpenAi:
            apiBase: https://your-resource-name.openai.azure.com
            deployment: your-resource-name
            openaiKey: '...my_openai_key...'
          cohere:
            cohereKey: '...my_cohere_key...'
          fake: {}
          openAi:
            openaiKey: '...my_openai_key...'
          openAiCompatible:
            apiKey: '...my_api_key...'
            baseUrl: https://your-service-name.com
            dimensions: 1536
            modelName: text-embedding-ada-002
        indexing:
          credentials:
            password: AIRBYTE_PASSWORD
          database: AIRBYTE_DATABASE
          defaultSchema: AIRBYTE_SCHEMA
          host: AIRBYTE_ACCOUNT
          role: AIRBYTE_ROLE
          username: AIRBYTE_USER
          warehouse: AIRBYTE_WAREHOUSE
        omit_raw_text: true
        processing:
          chunkOverlap: 3
          chunkSize: 6147
          fieldNameMappings:
            - fromField: '...my_from_field...'
              toField: '...my_to_field...'
          metadataFields:
            - '...'
          textFields:
            - '...'
          textSplitter:
            byMarkdownHeader:
              splitLevel: 3
            byProgrammingLanguage:
              language: rst
            bySeparator:
              keepSeparator: true
              separators:
                - '...'
      definitionId: 4e970f65-b8a4-4398-b19e-2a5644731a72
      workspaceId: d33dd7fd-91b5-4245-9a6e-0c987c8003c9
Create DestinationSnowflakeCortex Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DestinationSnowflakeCortex(name: string, args: DestinationSnowflakeCortexArgs, opts?: CustomResourceOptions);
@overload
def DestinationSnowflakeCortex(resource_name: str,
                               args: DestinationSnowflakeCortexArgs,
                               opts: Optional[ResourceOptions] = None)
@overload
def DestinationSnowflakeCortex(resource_name: str,
                               opts: Optional[ResourceOptions] = None,
                               configuration: Optional[DestinationSnowflakeCortexConfigurationArgs] = None,
                               workspace_id: Optional[str] = None,
                               definition_id: Optional[str] = None,
                               name: Optional[str] = None)
func NewDestinationSnowflakeCortex(ctx *Context, name string, args DestinationSnowflakeCortexArgs, opts ...ResourceOption) (*DestinationSnowflakeCortex, error)
public DestinationSnowflakeCortex(string name, DestinationSnowflakeCortexArgs args, CustomResourceOptions? opts = null)
public DestinationSnowflakeCortex(String name, DestinationSnowflakeCortexArgs args)
public DestinationSnowflakeCortex(String name, DestinationSnowflakeCortexArgs args, CustomResourceOptions options)
type: airbyte:DestinationSnowflakeCortex
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var destinationSnowflakeCortexResource = new Airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource", new()
{
    Configuration = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationArgs
    {
        Embedding = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingArgs
        {
            AzureOpenAi = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs
            {
                ApiBase = "string",
                Deployment = "string",
                OpenaiKey = "string",
            },
            Cohere = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs
            {
                CohereKey = "string",
            },
            Fake = null,
            OpenAi = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs
            {
                OpenaiKey = "string",
            },
            OpenAiCompatible = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs
            {
                BaseUrl = "string",
                Dimensions = 0,
                ApiKey = "string",
                ModelName = "string",
            },
        },
        Indexing = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationIndexingArgs
        {
            Credentials = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs
            {
                Password = "string",
            },
            Database = "string",
            DefaultSchema = "string",
            Host = "string",
            Role = "string",
            Username = "string",
            Warehouse = "string",
        },
        Processing = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingArgs
        {
            ChunkSize = 0,
            ChunkOverlap = 0,
            FieldNameMappings = new[]
            {
                new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs
                {
                    FromField = "string",
                    ToField = "string",
                },
            },
            MetadataFields = new[]
            {
                "string",
            },
            TextFields = new[]
            {
                "string",
            },
            TextSplitter = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs
            {
                ByMarkdownHeader = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs
                {
                    SplitLevel = 0,
                },
                ByProgrammingLanguage = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs
                {
                    Language = "string",
                },
                BySeparator = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs
                {
                    KeepSeparator = false,
                    Separators = new[]
                    {
                        "string",
                    },
                },
            },
        },
        OmitRawText = false,
    },
    WorkspaceId = "string",
    DefinitionId = "string",
    Name = "string",
});
example, err := airbyte.NewDestinationSnowflakeCortex(ctx, "destinationSnowflakeCortexResource", &airbyte.DestinationSnowflakeCortexArgs{
Configuration: &airbyte.DestinationSnowflakeCortexConfigurationArgs{
Embedding: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingArgs{
AzureOpenAi: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs{
ApiBase: pulumi.String("string"),
Deployment: pulumi.String("string"),
OpenaiKey: pulumi.String("string"),
},
Cohere: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs{
CohereKey: pulumi.String("string"),
},
Fake: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs{
},
OpenAi: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs{
OpenaiKey: pulumi.String("string"),
},
OpenAiCompatible: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs{
BaseUrl: pulumi.String("string"),
Dimensions: pulumi.Float64(0),
ApiKey: pulumi.String("string"),
ModelName: pulumi.String("string"),
},
},
Indexing: &airbyte.DestinationSnowflakeCortexConfigurationIndexingArgs{
Credentials: &airbyte.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs{
Password: pulumi.String("string"),
},
Database: pulumi.String("string"),
DefaultSchema: pulumi.String("string"),
Host: pulumi.String("string"),
Role: pulumi.String("string"),
Username: pulumi.String("string"),
Warehouse: pulumi.String("string"),
},
Processing: &airbyte.DestinationSnowflakeCortexConfigurationProcessingArgs{
ChunkSize: pulumi.Float64(0),
ChunkOverlap: pulumi.Float64(0),
FieldNameMappings: airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArray{
&airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs{
FromField: pulumi.String("string"),
ToField: pulumi.String("string"),
},
},
MetadataFields: pulumi.StringArray{
pulumi.String("string"),
},
TextFields: pulumi.StringArray{
pulumi.String("string"),
},
TextSplitter: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs{
ByMarkdownHeader: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs{
SplitLevel: pulumi.Float64(0),
},
ByProgrammingLanguage: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs{
Language: pulumi.String("string"),
},
BySeparator: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs{
KeepSeparator: pulumi.Bool(false),
Separators: pulumi.StringArray{
pulumi.String("string"),
},
},
},
},
OmitRawText: pulumi.Bool(false),
},
WorkspaceId: pulumi.String("string"),
DefinitionId: pulumi.String("string"),
Name: pulumi.String("string"),
})
var destinationSnowflakeCortexResource = new DestinationSnowflakeCortex("destinationSnowflakeCortexResource", DestinationSnowflakeCortexArgs.builder()
    .configuration(DestinationSnowflakeCortexConfigurationArgs.builder()
        .embedding(DestinationSnowflakeCortexConfigurationEmbeddingArgs.builder()
            .azureOpenAi(DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs.builder()
                .apiBase("string")
                .deployment("string")
                .openaiKey("string")
                .build())
            .cohere(DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs.builder()
                .cohereKey("string")
                .build())
            .fake()
            .openAi(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs.builder()
                .openaiKey("string")
                .build())
            .openAiCompatible(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs.builder()
                .baseUrl("string")
                .dimensions(0)
                .apiKey("string")
                .modelName("string")
                .build())
            .build())
        .indexing(DestinationSnowflakeCortexConfigurationIndexingArgs.builder()
            .credentials(DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs.builder()
                .password("string")
                .build())
            .database("string")
            .defaultSchema("string")
            .host("string")
            .role("string")
            .username("string")
            .warehouse("string")
            .build())
        .processing(DestinationSnowflakeCortexConfigurationProcessingArgs.builder()
            .chunkSize(0)
            .chunkOverlap(0)
            .fieldNameMappings(DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs.builder()
                .fromField("string")
                .toField("string")
                .build())
            .metadataFields("string")
            .textFields("string")
            .textSplitter(DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs.builder()
                .byMarkdownHeader(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs.builder()
                    .splitLevel(0)
                    .build())
                .byProgrammingLanguage(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs.builder()
                    .language("string")
                    .build())
                .bySeparator(DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs.builder()
                    .keepSeparator(false)
                    .separators("string")
                    .build())
                .build())
            .build())
        .omitRawText(false)
        .build())
    .workspaceId("string")
    .definitionId("string")
    .name("string")
    .build());
destination_snowflake_cortex_resource = airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource",
    configuration={
        "embedding": {
            "azure_open_ai": {
                "api_base": "string",
                "deployment": "string",
                "openai_key": "string",
            },
            "cohere": {
                "cohere_key": "string",
            },
            "fake": {},
            "open_ai": {
                "openai_key": "string",
            },
            "open_ai_compatible": {
                "base_url": "string",
                "dimensions": 0,
                "api_key": "string",
                "model_name": "string",
            },
        },
        "indexing": {
            "credentials": {
                "password": "string",
            },
            "database": "string",
            "default_schema": "string",
            "host": "string",
            "role": "string",
            "username": "string",
            "warehouse": "string",
        },
        "processing": {
            "chunk_size": 0,
            "chunk_overlap": 0,
            "field_name_mappings": [{
                "from_field": "string",
                "to_field": "string",
            }],
            "metadata_fields": ["string"],
            "text_fields": ["string"],
            "text_splitter": {
                "by_markdown_header": {
                    "split_level": 0,
                },
                "by_programming_language": {
                    "language": "string",
                },
                "by_separator": {
                    "keep_separator": False,
                    "separators": ["string"],
                },
            },
        },
        "omit_raw_text": False,
    },
    workspace_id="string",
    definition_id="string",
    name="string")
const destinationSnowflakeCortexResource = new airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource", {
    configuration: {
        embedding: {
            azureOpenAi: {
                apiBase: "string",
                deployment: "string",
                openaiKey: "string",
            },
            cohere: {
                cohereKey: "string",
            },
            fake: {},
            openAi: {
                openaiKey: "string",
            },
            openAiCompatible: {
                baseUrl: "string",
                dimensions: 0,
                apiKey: "string",
                modelName: "string",
            },
        },
        indexing: {
            credentials: {
                password: "string",
            },
            database: "string",
            defaultSchema: "string",
            host: "string",
            role: "string",
            username: "string",
            warehouse: "string",
        },
        processing: {
            chunkSize: 0,
            chunkOverlap: 0,
            fieldNameMappings: [{
                fromField: "string",
                toField: "string",
            }],
            metadataFields: ["string"],
            textFields: ["string"],
            textSplitter: {
                byMarkdownHeader: {
                    splitLevel: 0,
                },
                byProgrammingLanguage: {
                    language: "string",
                },
                bySeparator: {
                    keepSeparator: false,
                    separators: ["string"],
                },
            },
        },
        omitRawText: false,
    },
    workspaceId: "string",
    definitionId: "string",
    name: "string",
});
type: airbyte:DestinationSnowflakeCortex
properties:
    configuration:
        embedding:
            azureOpenAi:
                apiBase: string
                deployment: string
                openaiKey: string
            cohere:
                cohereKey: string
            fake: {}
            openAi:
                openaiKey: string
            openAiCompatible:
                apiKey: string
                baseUrl: string
                dimensions: 0
                modelName: string
        indexing:
            credentials:
                password: string
            database: string
            defaultSchema: string
            host: string
            role: string
            username: string
            warehouse: string
        omitRawText: false
        processing:
            chunkOverlap: 0
            chunkSize: 0
            fieldNameMappings:
                - fromField: string
                  toField: string
            metadataFields:
                - string
            textFields:
                - string
            textSplitter:
                byMarkdownHeader:
                    splitLevel: 0
                byProgrammingLanguage:
                    language: string
                bySeparator:
                    keepSeparator: false
                    separators:
                        - string
    definitionId: string
    name: string
    workspaceId: string
DestinationSnowflakeCortex Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DestinationSnowflakeCortex resource accepts the following input properties:
- Configuration
DestinationSnowflake Cortex Configuration 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- WorkspaceId string
- DefinitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the destination e.g. dev-mysql-instance.
- Configuration
DestinationSnowflake Cortex Configuration Args 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- WorkspaceId string
- DefinitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the destination e.g. dev-mysql-instance.
- configuration
DestinationSnowflake Cortex Configuration 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspaceId String
- definitionId String
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the destination e.g. dev-mysql-instance.
- configuration
DestinationSnowflake Cortex Configuration 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspaceId string
- definitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the destination e.g. dev-mysql-instance.
- configuration
DestinationSnowflake Cortex Configuration Args 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspace_id str
- definition_id str
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name str
- Name of the destination e.g. dev-mysql-instance.
- configuration Property Map
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspaceId String
- definitionId String
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the destination e.g. dev-mysql-instance.
Outputs
All input properties are implicitly available as output properties. Additionally, the DestinationSnowflakeCortex resource produces the following output properties:
- CreatedAt double
- DestinationId string
- DestinationType string
- Id string
- The provider-assigned unique ID for this managed resource.
- CreatedAt float64
- DestinationId string
- DestinationType string
- Id string
- The provider-assigned unique ID for this managed resource.
- createdAt Double
- destinationId String
- destinationType String
- id String
- The provider-assigned unique ID for this managed resource.
- createdAt number
- destinationId string
- destinationType string
- id string
- The provider-assigned unique ID for this managed resource.
- created_at float
- destination_id str
- destination_type str
- id str
- The provider-assigned unique ID for this managed resource.
- createdAt Number
- destinationId String
- destinationType String
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing DestinationSnowflakeCortex Resource
Get an existing DestinationSnowflakeCortex resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DestinationSnowflakeCortexState, opts?: CustomResourceOptions): DestinationSnowflakeCortex@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[DestinationSnowflakeCortexConfigurationArgs] = None,
        created_at: Optional[float] = None,
        definition_id: Optional[str] = None,
        destination_id: Optional[str] = None,
        destination_type: Optional[str] = None,
        name: Optional[str] = None,
        workspace_id: Optional[str] = None) -> DestinationSnowflakeCortex
func GetDestinationSnowflakeCortex(ctx *Context, name string, id IDInput, state *DestinationSnowflakeCortexState, opts ...ResourceOption) (*DestinationSnowflakeCortex, error)
public static DestinationSnowflakeCortex Get(string name, Input<string> id, DestinationSnowflakeCortexState? state, CustomResourceOptions? opts = null)
public static DestinationSnowflakeCortex get(String name, Output<String> id, DestinationSnowflakeCortexState state, CustomResourceOptions options)
resources:
  _:
    type: airbyte:DestinationSnowflakeCortex
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Configuration
DestinationSnowflake Cortex Configuration 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- CreatedAt double
- DefinitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- DestinationId string
- DestinationType string
- Name string
- Name of the destination e.g. dev-mysql-instance.
- WorkspaceId string
- Configuration
DestinationSnowflake Cortex Configuration Args 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- CreatedAt float64
- DefinitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- DestinationId string
- DestinationType string
- Name string
- Name of the destination e.g. dev-mysql-instance.
- WorkspaceId string
- configuration
DestinationSnowflake Cortex Configuration 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- createdAt Double
- definitionId String
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destinationId String
- destinationType String
- name String
- Name of the destination e.g. dev-mysql-instance.
- workspaceId String
- configuration
DestinationSnowflake Cortex Configuration 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- createdAt number
- definitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destinationId string
- destinationType string
- name string
- Name of the destination e.g. dev-mysql-instance.
- workspaceId string
- configuration
DestinationSnowflake Cortex Configuration Args 
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- created_at float
- definition_id str
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destination_id str
- destination_type str
- name str
- Name of the destination e.g. dev-mysql-instance.
- workspace_id str
- configuration Property Map
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- createdAt Number
- definitionId String
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destinationId String
- destinationType String
- name String
- Name of the destination e.g. dev-mysql-instance.
- workspaceId String
Supporting Types
DestinationSnowflakeCortexConfiguration, DestinationSnowflakeCortexConfigurationArgs        
- Embedding
DestinationSnowflake Cortex Configuration Embedding 
- Embedding configuration
- Indexing
DestinationSnowflake Cortex Configuration Indexing 
- Snowflake can be used to store vector data and retrieve embeddings.
- Processing
DestinationSnowflake Cortex Configuration Processing 
- OmitRawText bool
- Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- Embedding
DestinationSnowflake Cortex Configuration Embedding 
- Embedding configuration
- Indexing
DestinationSnowflake Cortex Configuration Indexing 
- Snowflake can be used to store vector data and retrieve embeddings.
- Processing
DestinationSnowflake Cortex Configuration Processing 
- OmitRawText bool
- Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding
DestinationSnowflake Cortex Configuration Embedding 
- Embedding configuration
- indexing
DestinationSnowflake Cortex Configuration Indexing 
- Snowflake can be used to store vector data and retrieve embeddings.
- processing
DestinationSnowflake Cortex Configuration Processing 
- omitRawText Boolean
- Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding
DestinationSnowflake Cortex Configuration Embedding 
- Embedding configuration
- indexing
DestinationSnowflake Cortex Configuration Indexing 
- Snowflake can be used to store vector data and retrieve embeddings.
- processing
DestinationSnowflake Cortex Configuration Processing 
- omitRawText boolean
- Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding
DestinationSnowflake Cortex Configuration Embedding 
- Embedding configuration
- indexing
DestinationSnowflake Cortex Configuration Indexing 
- Snowflake can be used to store vector data and retrieve embeddings.
- processing
DestinationSnowflake Cortex Configuration Processing 
- omit_raw_text bool
- Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding Property Map
- Embedding configuration
- indexing Property Map
- Snowflake can be used to store vector data and retrieve embeddings.
- processing Property Map
- omitRawText Boolean
- Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
DestinationSnowflakeCortexConfigurationEmbedding, DestinationSnowflakeCortexConfigurationEmbeddingArgs          
- AzureOpenAi DestinationSnowflake Cortex Configuration Embedding Azure Open Ai 
- Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- Cohere
DestinationSnowflake Cortex Configuration Embedding Cohere 
- Use the Cohere API to embed text.
- Fake
DestinationSnowflake Cortex Configuration Embedding Fake 
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- OpenAi DestinationSnowflake Cortex Configuration Embedding Open Ai 
- Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- OpenAiCompatible DestinationSnowflake Cortex Configuration Embedding Open Ai Compatible 
- Use a service that's compatible with the OpenAI API to embed text.
- AzureOpenAi DestinationSnowflake Cortex Configuration Embedding Azure Open Ai 
- Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- Cohere
DestinationSnowflake Cortex Configuration Embedding Cohere 
- Use the Cohere API to embed text.
- Fake
DestinationSnowflake Cortex Configuration Embedding Fake 
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- OpenAi DestinationSnowflake Cortex Configuration Embedding Open Ai 
- Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- OpenAiCompatible DestinationSnowflake Cortex Configuration Embedding Open Ai Compatible 
- Use a service that's compatible with the OpenAI API to embed text.
- azureOpenAi DestinationSnowflake Cortex Configuration Embedding Azure Open Ai 
- Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere
DestinationSnowflake Cortex Configuration Embedding Cohere 
- Use the Cohere API to embed text.
- fake
DestinationSnowflake Cortex Configuration Embedding Fake 
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- openAi DestinationSnowflake Cortex Configuration Embedding Open Ai 
- Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- openAiCompatible DestinationSnowflake Cortex Configuration Embedding Open Ai Compatible 
- Use a service that's compatible with the OpenAI API to embed text.
- azureOpenAi DestinationSnowflake Cortex Configuration Embedding Azure Open Ai 
- Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere
DestinationSnowflake Cortex Configuration Embedding Cohere 
- Use the Cohere API to embed text.
- fake
DestinationSnowflake Cortex Configuration Embedding Fake 
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- openAi DestinationSnowflake Cortex Configuration Embedding Open Ai 
- Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- openAiCompatible DestinationSnowflake Cortex Configuration Embedding Open Ai Compatible 
- Use a service that's compatible with the OpenAI API to embed text.
- azure_open_ai DestinationSnowflake Cortex Configuration Embedding Azure Open Ai 
- Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere
DestinationSnowflake Cortex Configuration Embedding Cohere 
- Use the Cohere API to embed text.
- fake
DestinationSnowflake Cortex Configuration Embedding Fake 
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- open_ai DestinationSnowflake Cortex Configuration Embedding Open Ai 
- Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- open_ai_compatible DestinationSnowflake Cortex Configuration Embedding Open Ai Compatible 
- Use a service that's compatible with the OpenAI API to embed text.
- azureOpenAi Property Map
- Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere Property Map
- Use the Cohere API to embed text.
- fake Property Map
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- openAi Property Map
- Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- openAiCompatible Property Map
- Use a service that's compatible with the OpenAI API to embed text.
DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi, DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs                
- ApiBase string
- The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Deployment string
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- OpenaiKey string
- The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- ApiBase string
- The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Deployment string
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- OpenaiKey string
- The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- apiBase String
- The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment String
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openaiKey String
- The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- apiBase string
- The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment string
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openaiKey string
- The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- api_base str
- The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment str
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openai_key str
- The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- apiBase String
- The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment String
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openaiKey String
- The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
DestinationSnowflakeCortexConfigurationEmbeddingCohere, DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs            
- CohereKey string
- CohereKey string
- cohereKey String
- cohereKey string
- cohere_key str
- cohereKey String
DestinationSnowflakeCortexConfigurationEmbeddingOpenAi, DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs              
- OpenaiKey string
- OpenaiKey string
- openaiKey String
- openaiKey string
- openai_key str
- openaiKey String
DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible, DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs                
- BaseUrl string
- The base URL for your OpenAI-compatible service
- Dimensions double
- The number of dimensions the embedding model is generating
- ApiKey string
- Default: ""
- ModelName string
- The name of the model to use for embedding. Default: "text-embedding-ada-002"
- BaseUrl string
- The base URL for your OpenAI-compatible service
- Dimensions float64
- The number of dimensions the embedding model is generating
- ApiKey string
- Default: ""
- ModelName string
- The name of the model to use for embedding. Default: "text-embedding-ada-002"
- baseUrl String
- The base URL for your OpenAI-compatible service
- dimensions Double
- The number of dimensions the embedding model is generating
- apiKey String
- Default: ""
- modelName String
- The name of the model to use for embedding. Default: "text-embedding-ada-002"
- baseUrl string
- The base URL for your OpenAI-compatible service
- dimensions number
- The number of dimensions the embedding model is generating
- apiKey string
- Default: ""
- modelName string
- The name of the model to use for embedding. Default: "text-embedding-ada-002"
- base_url str
- The base URL for your OpenAI-compatible service
- dimensions float
- The number of dimensions the embedding model is generating
- api_key str
- Default: ""
- model_name str
- The name of the model to use for embedding. Default: "text-embedding-ada-002"
- baseUrl String
- The base URL for your OpenAI-compatible service
- dimensions Number
- The number of dimensions the embedding model is generating
- apiKey String
- Default: ""
- modelName String
- The name of the model to use for embedding. Default: "text-embedding-ada-002"
DestinationSnowflakeCortexConfigurationIndexing, DestinationSnowflakeCortexConfigurationIndexingArgs          
- Credentials
DestinationSnowflake Cortex Configuration Indexing Credentials 
- Database string
- Enter the name of the database that you want to sync data into
- DefaultSchema string
- Enter the name of the default schema
- Host string
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- Role string
- Enter the role that you want to use to access Snowflake
- Username string
- Enter the name of the user you want to use to access the database
- Warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- Credentials
DestinationSnowflake Cortex Configuration Indexing Credentials 
- Database string
- Enter the name of the database that you want to sync data into
- DefaultSchema string
- Enter the name of the default schema
- Host string
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- Role string
- Enter the role that you want to use to access Snowflake
- Username string
- Enter the name of the user you want to use to access the database
- Warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials
DestinationSnowflake Cortex Configuration Indexing Credentials 
- database String
- Enter the name of the database that you want to sync data into
- defaultSchema String
- Enter the name of the default schema
- host String
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role String
- Enter the role that you want to use to access Snowflake
- username String
- Enter the name of the user you want to use to access the database
- warehouse String
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials
DestinationSnowflake Cortex Configuration Indexing Credentials 
- database string
- Enter the name of the database that you want to sync data into
- defaultSchema string
- Enter the name of the default schema
- host string
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role string
- Enter the role that you want to use to access Snowflake
- username string
- Enter the name of the user you want to use to access the database
- warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials
DestinationSnowflake Cortex Configuration Indexing Credentials 
- database str
- Enter the name of the database that you want to sync data into
- default_schema str
- Enter the name of the default schema
- host str
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role str
- Enter the role that you want to use to access Snowflake
- username str
- Enter the name of the user you want to use to access the database
- warehouse str
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials Property Map
- database String
- Enter the name of the database that you want to sync data into
- defaultSchema String
- Enter the name of the default schema
- host String
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role String
- Enter the role that you want to use to access Snowflake
- username String
- Enter the name of the user you want to use to access the database
- warehouse String
- Enter the name of the warehouse that you want to use as a compute cluster
DestinationSnowflakeCortexConfigurationIndexingCredentials, DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs            
- Password string
- Enter the password you want to use to access the database
- Password string
- Enter the password you want to use to access the database
- password String
- Enter the password you want to use to access the database
- password string
- Enter the password you want to use to access the database
- password str
- Enter the password you want to use to access the database
- password String
- Enter the password you want to use to access the database
DestinationSnowflakeCortexConfigurationProcessing, DestinationSnowflakeCortexConfigurationProcessingArgs          
- ChunkSize double
- Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
- ChunkOverlap double
- Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- FieldNameMappings List<Destination Snowflake Cortex Configuration Processing Field Name Mapping> 
- List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- MetadataFields List<string>
- List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- TextFields List<string>
- List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
- TextSplitter DestinationSnowflake Cortex Configuration Processing Text Splitter 
- Split text fields into chunks based on the specified method.
- ChunkSize float64
- Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
- ChunkOverlap float64
- Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- FieldNameMappings []Destination Snowflake Cortex Configuration Processing Field Name Mapping 
- List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- MetadataFields []string
- List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- TextFields []string
- List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
- TextSplitter DestinationSnowflake Cortex Configuration Processing Text Splitter 
- Split text fields into chunks based on the specified method.
- chunkSize Double
- Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
- chunkOverlap Double
- Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- fieldNameMappings List<Destination Snowflake Cortex Configuration Processing Field Name Mapping> 
- List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadataFields List<String>
- List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- textFields List<String>
- List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
- textSplitter DestinationSnowflake Cortex Configuration Processing Text Splitter 
- Split text fields into chunks based on the specified method.
- chunkSize number
- Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
- chunkOverlap number
- Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- fieldNameMappings Destination Snowflake Cortex Configuration Processing Field Name Mapping[] 
- List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadataFields string[]
- List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- textFields string[]
- List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
- textSplitter DestinationSnowflake Cortex Configuration Processing Text Splitter 
- Split text fields into chunks based on the specified method.
- chunk_size float
- Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
- chunk_overlap float
- Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- field_name_mappings Sequence[Destination Snowflake Cortex Configuration Processing Field Name Mapping] 
- List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadata_fields Sequence[str]
- List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- text_fields Sequence[str]
- List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all names fields in all entries of the users array.
- text_splitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter
- Split text fields into chunks based on the specified method.
- chunkSize Number
- Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
- chunkOverlap Number
- Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- fieldNameMappings List<Property Map>
- List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadataFields List<String>
- List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all names fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- textFields List<String>
- List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all names fields in all entries of the users array.
- textSplitter Property Map
- Split text fields into chunks based on the specified method.
DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping, DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs                
- from_field str
- The field name in the source
- to_field str
- The field name to use in the destination
DestinationSnowflakeCortexConfigurationProcessingTextSplitter, DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs              
- ByMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
- Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- ByProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
- Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- BySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
- Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- ByMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
- Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- ByProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
- Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- BySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
- Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- byMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
- Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- byProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
- Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- bySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
- Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- byMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
- Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- byProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
- Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- bySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
- Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- by_markdown_header DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
- Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- by_programming_language DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
- Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- by_separator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
- Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- byMarkdownHeader Property Map
- Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- byProgrammingLanguage Property Map
- Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- bySeparator Property Map
- Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader, DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs                    
- SplitLevel double
- Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- SplitLevel float64
- Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- splitLevel Double
- Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- splitLevel number
- Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- split_level float
- Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- splitLevel Number
- Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage, DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs                    
- Language string
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- Language string
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language String
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language string
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language str
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language String
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator, DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs                  
- KeepSeparator bool
- Whether to keep the separator in the resulting chunks. Default: false
- Separators List<string>
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- KeepSeparator bool
- Whether to keep the separator in the resulting chunks. Default: false
- Separators []string
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keepSeparator Boolean
- Whether to keep the separator in the resulting chunks. Default: false
- separators List<String>
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keepSeparator boolean
- Whether to keep the separator in the resulting chunks. Default: false
- separators string[]
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keep_separator bool
- Whether to keep the separator in the resulting chunks. Default: false
- separators Sequence[str]
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keepSeparator Boolean
- Whether to keep the separator in the resulting chunks. Default: false
- separators List<String>
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
Import
$ pulumi import airbyte:index/destinationSnowflakeCortex:DestinationSnowflakeCortex my_airbyte_destination_snowflake_cortex ""
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the airbyte Terraform Provider.