Skip to content

Generate enum values for bundle fields #3049

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 2 commits into
base: generate-required-v3
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 2 additions & 5 deletions bundle/config/variable/resolve_metastore.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,8 @@ type resolveMetastore struct {
}

// Resolve looks up the metastore by its configured name and returns the
// metastore's ID. The placeholder early-return that previously shadowed this
// lookup was unreachable dead code and has been removed.
func (l resolveMetastore) Resolve(ctx context.Context, w *databricks.WorkspaceClient) (string, error) {
	entity, err := w.Metastores.GetByName(ctx, l.name)
	if err != nil {
		return "", err
	}
	return entity.MetastoreId, nil
}

func (l resolveMetastore) String() string {
Expand Down
238 changes: 238 additions & 0 deletions bundle/internal/validation/enum.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,238 @@
package main

import (
	"bytes"
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"reflect"
	"sort"
	"text/template"

	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/libs/structdiff/structpath"
	"github.com/databricks/cli/libs/structwalk"
)

// EnumPatternInfo describes one bundle config field whose value is restricted
// to a fixed set of enum values. One instance is emitted per matching field
// found while walking the config type tree.
type EnumPatternInfo struct {
	// The full pattern for which the enum values are applicable.
	// This is a string representation of [dyn.Pattern].
	Pattern string

	// List of valid enum values for this pattern. This field will be a string of the
	// form `{value1, value2, ...}` — a ready-made Go composite-literal body that is
	// spliced verbatim into the generated source by the template.
	EnumValues string
}

// hasValuesMethod checks if the pointer to a type has a Values() method
func hasValuesMethod(typ reflect.Type) bool {
// Check if the pointer to the type has a Values() method
ptrType := reflect.PointerTo(typ)
return checkValuesMethodSignature(ptrType)
}

// checkValuesMethodSignature verifies that a type has a Values() method with correct signature
func checkValuesMethodSignature(typ reflect.Type) bool {
method, exists := typ.MethodByName("Values")
if !exists {
return false
}

// Verify the method signature: func() []string
methodType := method.Type
if methodType.NumIn() != 1 || methodType.NumOut() != 1 {
return false
}

// Check return type is []string
returnType := methodType.Out(0)
if returnType.Kind() != reflect.Slice || returnType.Elem().Kind() != reflect.String {
return false
}

return true
}

// getEnumValues calls the Values() method on a pointer to the type to get valid enum values
func getEnumValues(typ reflect.Type) ([]string, error) {
// Create a pointer to zero value of the type and call Values() on it
zeroValue := reflect.Zero(typ)
ptrValue := reflect.New(typ)
ptrValue.Elem().Set(zeroValue)
method := ptrValue.MethodByName("Values")

if !method.IsValid() {
return nil, fmt.Errorf("Values method not found on pointer to type %s", typ.Name())
}

result := method.Call(nil)
if len(result) != 1 {
return nil, fmt.Errorf("Values method should return exactly one value")
}

enumSlice := result[0]
if enumSlice.Kind() != reflect.Slice {
return nil, fmt.Errorf("Values method should return a slice")
}

values := make([]string, enumSlice.Len())
for i := 0; i < enumSlice.Len(); i++ {
values[i] = enumSlice.Index(i).String()
}

return values, nil
}

// extractEnumFields walks through a struct type and extracts enum field patterns
func extractEnumFields(typ reflect.Type) ([]EnumPatternInfo, error) {
var patterns []EnumPatternInfo

err := structwalk.WalkType(typ, func(path *structpath.PathNode, fieldType reflect.Type) bool {
if path == nil {
return true
}

// Do not generate enum validation code for fields that are internal or readonly.
bundleTag := path.BundleTag()
if bundleTag.Internal() || bundleTag.ReadOnly() {
return false
}

// Check if this type has a Values() method on its pointer
if !hasValuesMethod(fieldType) {
return true
}

// Get the enum values
enumValues, err := getEnumValues(fieldType)
if err != nil {
// Skip if we can't get enum values
return true
}

// Store the full pattern path (not parent path)
fullPattern := path.DynPath()
patterns = append(patterns, EnumPatternInfo{
Pattern: fullPattern,
EnumValues: formatValues(enumValues),
})

return true
})
if err != nil {
return nil, err
}

return patterns, nil
}

// groupEnumPatternsByKey groups patterns by their logical grouping key
func groupEnumPatternsByKey(patterns []EnumPatternInfo) map[string][]EnumPatternInfo {
groupedPatterns := make(map[string][]EnumPatternInfo)

for _, pattern := range patterns {
key := getPatternGroupingKey(pattern.Pattern)
groupedPatterns[key] = append(groupedPatterns[key], pattern)
}

return groupedPatterns
}

// filterEnumTargetsAndEnvironments returns a copy of the grouped patterns
// without the "targets" and "environments" groups, which are excluded from
// enum validation.
func filterEnumTargetsAndEnvironments(patterns map[string][]EnumPatternInfo) map[string][]EnumPatternInfo {
	filtered := make(map[string][]EnumPatternInfo, len(patterns))
	for key, group := range patterns {
		switch key {
		case "targets", "environments":
			// Excluded sections: skip.
		default:
			filtered[key] = group
		}
	}
	return filtered
}

// sortGroupedEnumPatterns sorts patterns within each group and returns them as a sorted slice
func sortGroupedEnumPatterns(groupedPatterns map[string][]EnumPatternInfo) [][]EnumPatternInfo {
// Get sorted group keys
groupKeys := make([]string, 0, len(groupedPatterns))
for key := range groupedPatterns {
groupKeys = append(groupKeys, key)
}
sort.Strings(groupKeys)

// Build sorted result
result := make([][]EnumPatternInfo, 0, len(groupKeys))
for _, key := range groupKeys {
patterns := groupedPatterns[key]

// Sort patterns within each group by pattern path
sort.Slice(patterns, func(i, j int) bool {
return patterns[i].Pattern < patterns[j].Pattern
})

result = append(result, patterns)
}

return result
}

// enumFields returns grouped enum field patterns for validation
func enumFields() ([][]EnumPatternInfo, error) {
patterns, err := extractEnumFields(reflect.TypeOf(config.Root{}))
if err != nil {
return nil, err
}
groupedPatterns := groupEnumPatternsByKey(patterns)
filteredPatterns := filterEnumTargetsAndEnvironments(groupedPatterns)
return sortGroupedEnumPatterns(filteredPatterns), nil
}

// Generate creates a Go source file with enum field validation rules
func generateEnumFields(outPath string) error {
enumFields, err := enumFields()
if err != nil {
return fmt.Errorf("failed to generate enum fields: %w", err)
}

// Ensure output directory exists
if err := os.MkdirAll(outPath, 0o755); err != nil {
return fmt.Errorf("failed to create output directory: %w", err)
}

// Parse and execute template
tmpl, err := template.New("enum_validation").Parse(enumValidationTemplate)
if err != nil {
return fmt.Errorf("failed to parse template: %w", err)
}

var generatedCode bytes.Buffer
if err := tmpl.Execute(&generatedCode, enumFields); err != nil {
return fmt.Errorf("failed to execute template: %w", err)
}

// Write generated code to file
filePath := filepath.Join(outPath, "enum_fields.go")
if err := os.WriteFile(filePath, generatedCode.Bytes(), 0o644); err != nil {
return fmt.Errorf("failed to write generated code: %w", err)
}

return nil
}

// enumValidationTemplate is the Go text template for generating the enum validation map
const enumValidationTemplate = `package generated

// THIS FILE IS AUTOGENERATED.
// DO NOT EDIT THIS FILE DIRECTLY.

import (
_ "github.com/databricks/cli/libs/dyn"
)

// EnumFields maps [dyn.Pattern] to valid enum values they should have.
var EnumFields = map[string][]string{
{{- range . }}
{{- range . }}
"{{ .Pattern }}": {{ .EnumValues }},
{{- end }}
{{ end -}}
}
`
126 changes: 126 additions & 0 deletions bundle/internal/validation/generated/enum_fields.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
package generated

// THIS FILE IS AUTOGENERATED.
// DO NOT EDIT THIS FILE DIRECTLY.

import (
_ "github.com/databricks/cli/libs/dyn"
)

// EnumFields maps [dyn.Pattern] to valid enum values they should have.
// Keys are string representations of [dyn.Pattern] (see EnumPatternInfo in
// the generator); entries are grouped per resource type, with groups
// separated by blank lines.
var EnumFields = map[string][]string{
	"resources.apps.*.active_deployment.mode":                    {"AUTO_SYNC", "SNAPSHOT"},
	"resources.apps.*.active_deployment.status.state":            {"CANCELLED", "FAILED", "IN_PROGRESS", "SUCCEEDED"},
	"resources.apps.*.app_status.state":                          {"CRASHED", "DEPLOYING", "RUNNING", "UNAVAILABLE"},
	"resources.apps.*.compute_status.state":                      {"ACTIVE", "DELETING", "ERROR", "STARTING", "STOPPED", "STOPPING", "UPDATING"},
	"resources.apps.*.pending_deployment.mode":                   {"AUTO_SYNC", "SNAPSHOT"},
	"resources.apps.*.pending_deployment.status.state":           {"CANCELLED", "FAILED", "IN_PROGRESS", "SUCCEEDED"},
	"resources.apps.*.resources[*].job.permission":               {"CAN_MANAGE", "CAN_MANAGE_RUN", "CAN_VIEW", "IS_OWNER"},
	"resources.apps.*.resources[*].secret.permission":            {"MANAGE", "READ", "WRITE"},
	"resources.apps.*.resources[*].serving_endpoint.permission":  {"CAN_MANAGE", "CAN_QUERY", "CAN_VIEW"},
	"resources.apps.*.resources[*].sql_warehouse.permission":     {"CAN_MANAGE", "CAN_USE", "IS_OWNER"},
	"resources.apps.*.resources[*].uc_securable.permission":      {"READ_VOLUME", "WRITE_VOLUME"},
	"resources.apps.*.resources[*].uc_securable.securable_type":  {"VOLUME"},

	"resources.clusters.*.aws_attributes.availability":    {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"},
	"resources.clusters.*.aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"},
	"resources.clusters.*.azure_attributes.availability":  {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"},
	"resources.clusters.*.data_security_mode":             {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"},
	"resources.clusters.*.gcp_attributes.availability":    {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"},
	"resources.clusters.*.kind":                           {"CLASSIC_PREVIEW"},
	"resources.clusters.*.runtime_engine":                 {"NULL", "PHOTON", "STANDARD"},

	"resources.dashboards.*.lifecycle_state": {"ACTIVE", "TRASHED"},

	"resources.jobs.*.continuous.pause_status":                                                    {"PAUSED", "UNPAUSED"},
	"resources.jobs.*.deployment.kind":                                                            {"BUNDLE"},
	"resources.jobs.*.edit_mode":                                                                  {"EDITABLE", "UI_LOCKED"},
	"resources.jobs.*.format":                                                                     {"MULTI_TASK", "SINGLE_TASK"},
	"resources.jobs.*.git_source.git_provider":                                                    {"awsCodeCommit", "azureDevOpsServices", "bitbucketCloud", "bitbucketServer", "gitHub", "gitHubEnterprise", "gitLab", "gitLabEnterpriseEdition"},
	"resources.jobs.*.git_source.job_source.dirty_state":                                          {"DISCONNECTED", "NOT_SYNCED"},
	"resources.jobs.*.health.rules[*].metric":                                                     {"RUN_DURATION_SECONDS", "STREAMING_BACKLOG_BYTES", "STREAMING_BACKLOG_FILES", "STREAMING_BACKLOG_RECORDS", "STREAMING_BACKLOG_SECONDS"},
	"resources.jobs.*.health.rules[*].op":                                                         {"GREATER_THAN"},
	"resources.jobs.*.job_clusters[*].new_cluster.aws_attributes.availability":                    {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"},
	"resources.jobs.*.job_clusters[*].new_cluster.aws_attributes.ebs_volume_type":                 {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"},
	"resources.jobs.*.job_clusters[*].new_cluster.azure_attributes.availability":                  {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"},
	"resources.jobs.*.job_clusters[*].new_cluster.data_security_mode":                             {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"},
	"resources.jobs.*.job_clusters[*].new_cluster.gcp_attributes.availability":                    {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"},
	"resources.jobs.*.job_clusters[*].new_cluster.kind":                                           {"CLASSIC_PREVIEW"},
	"resources.jobs.*.job_clusters[*].new_cluster.runtime_engine":                                 {"NULL", "PHOTON", "STANDARD"},
	"resources.jobs.*.performance_target":                                                         {"PERFORMANCE_OPTIMIZED", "STANDARD"},
	"resources.jobs.*.schedule.pause_status":                                                      {"PAUSED", "UNPAUSED"},
	"resources.jobs.*.tasks[*].condition_task.op":                                                 {"EQUAL_TO", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "NOT_EQUAL"},
	"resources.jobs.*.tasks[*].dbt_task.source":                                                   {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].for_each_task.task.condition_task.op":                              {"EQUAL_TO", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "NOT_EQUAL"},
	"resources.jobs.*.tasks[*].for_each_task.task.dbt_task.source":                                {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].for_each_task.task.gen_ai_compute_task.source":                     {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].for_each_task.task.health.rules[*].metric":                         {"RUN_DURATION_SECONDS", "STREAMING_BACKLOG_BYTES", "STREAMING_BACKLOG_FILES", "STREAMING_BACKLOG_RECORDS", "STREAMING_BACKLOG_SECONDS"},
	"resources.jobs.*.tasks[*].for_each_task.task.health.rules[*].op":                             {"GREATER_THAN"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.aws_attributes.availability":        {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.aws_attributes.ebs_volume_type":     {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.azure_attributes.availability":      {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.data_security_mode":                 {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.gcp_attributes.availability":        {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.kind":                               {"CLASSIC_PREVIEW"},
	"resources.jobs.*.tasks[*].for_each_task.task.new_cluster.runtime_engine":                     {"NULL", "PHOTON", "STANDARD"},
	"resources.jobs.*.tasks[*].for_each_task.task.notebook_task.source":                           {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].for_each_task.task.power_bi_task.power_bi_model.authentication_method": {"OAUTH", "PAT"},
	"resources.jobs.*.tasks[*].for_each_task.task.power_bi_task.power_bi_model.storage_mode":      {"DIRECT_QUERY", "DUAL", "IMPORT"},
	"resources.jobs.*.tasks[*].for_each_task.task.power_bi_task.tables[*].storage_mode":           {"DIRECT_QUERY", "DUAL", "IMPORT"},
	"resources.jobs.*.tasks[*].for_each_task.task.run_if":                                         {"ALL_DONE", "ALL_FAILED", "ALL_SUCCESS", "AT_LEAST_ONE_FAILED", "AT_LEAST_ONE_SUCCESS", "NONE_FAILED"},
	"resources.jobs.*.tasks[*].for_each_task.task.spark_python_task.source":                       {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].for_each_task.task.sql_task.file.source":                           {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].gen_ai_compute_task.source":                                        {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].health.rules[*].metric":                                            {"RUN_DURATION_SECONDS", "STREAMING_BACKLOG_BYTES", "STREAMING_BACKLOG_FILES", "STREAMING_BACKLOG_RECORDS", "STREAMING_BACKLOG_SECONDS"},
	"resources.jobs.*.tasks[*].health.rules[*].op":                                                {"GREATER_THAN"},
	"resources.jobs.*.tasks[*].new_cluster.aws_attributes.availability":                           {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"},
	"resources.jobs.*.tasks[*].new_cluster.aws_attributes.ebs_volume_type":                        {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"},
	"resources.jobs.*.tasks[*].new_cluster.azure_attributes.availability":                         {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"},
	"resources.jobs.*.tasks[*].new_cluster.data_security_mode":                                    {"DATA_SECURITY_MODE_AUTO", "DATA_SECURITY_MODE_DEDICATED", "DATA_SECURITY_MODE_STANDARD", "LEGACY_PASSTHROUGH", "LEGACY_SINGLE_USER", "LEGACY_SINGLE_USER_STANDARD", "LEGACY_TABLE_ACL", "NONE", "SINGLE_USER", "USER_ISOLATION"},
	"resources.jobs.*.tasks[*].new_cluster.gcp_attributes.availability":                           {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"},
	"resources.jobs.*.tasks[*].new_cluster.kind":                                                  {"CLASSIC_PREVIEW"},
	"resources.jobs.*.tasks[*].new_cluster.runtime_engine":                                        {"NULL", "PHOTON", "STANDARD"},
	"resources.jobs.*.tasks[*].notebook_task.source":                                              {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].power_bi_task.power_bi_model.authentication_method":                {"OAUTH", "PAT"},
	"resources.jobs.*.tasks[*].power_bi_task.power_bi_model.storage_mode":                         {"DIRECT_QUERY", "DUAL", "IMPORT"},
	"resources.jobs.*.tasks[*].power_bi_task.tables[*].storage_mode":                              {"DIRECT_QUERY", "DUAL", "IMPORT"},
	"resources.jobs.*.tasks[*].run_if":                                                            {"ALL_DONE", "ALL_FAILED", "ALL_SUCCESS", "AT_LEAST_ONE_FAILED", "AT_LEAST_ONE_SUCCESS", "NONE_FAILED"},
	"resources.jobs.*.tasks[*].spark_python_task.source":                                          {"GIT", "WORKSPACE"},
	"resources.jobs.*.tasks[*].sql_task.file.source":                                              {"GIT", "WORKSPACE"},
	"resources.jobs.*.trigger.pause_status":                                                       {"PAUSED", "UNPAUSED"},
	"resources.jobs.*.trigger.periodic.unit":                                                      {"DAYS", "HOURS", "WEEKS"},
	"resources.jobs.*.trigger.table.condition":                                                    {"ALL_UPDATED", "ANY_UPDATED"},
	"resources.jobs.*.trigger.table_update.condition":                                             {"ALL_UPDATED", "ANY_UPDATED"},

	"resources.model_serving_endpoints.*.ai_gateway.guardrails.input.pii.behavior":                                        {"BLOCK", "NONE"},
	"resources.model_serving_endpoints.*.ai_gateway.guardrails.output.pii.behavior":                                       {"BLOCK", "NONE"},
	"resources.model_serving_endpoints.*.ai_gateway.rate_limits[*].key":                                                   {"endpoint", "user"},
	"resources.model_serving_endpoints.*.ai_gateway.rate_limits[*].renewal_period":                                        {"minute"},
	"resources.model_serving_endpoints.*.config.served_entities[*].external_model.amazon_bedrock_config.bedrock_provider": {"ai21labs", "amazon", "anthropic", "cohere"},
	"resources.model_serving_endpoints.*.config.served_entities[*].external_model.provider":                               {"ai21labs", "amazon-bedrock", "anthropic", "cohere", "custom", "databricks-model-serving", "google-cloud-vertex-ai", "openai", "palm"},
	"resources.model_serving_endpoints.*.config.served_entities[*].workload_type":                                         {"CPU", "GPU_LARGE", "GPU_MEDIUM", "GPU_SMALL", "MULTIGPU_MEDIUM"},
	"resources.model_serving_endpoints.*.config.served_models[*].workload_type":                                           {"CPU", "GPU_LARGE", "GPU_MEDIUM", "GPU_SMALL", "MULTIGPU_MEDIUM"},
	"resources.model_serving_endpoints.*.rate_limits[*].key":                                                              {"endpoint", "user"},
	"resources.model_serving_endpoints.*.rate_limits[*].renewal_period":                                                   {"minute"},

	"resources.pipelines.*.clusters[*].autoscale.mode":                                           {"ENHANCED", "LEGACY"},
	"resources.pipelines.*.clusters[*].aws_attributes.availability":                              {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"},
	"resources.pipelines.*.clusters[*].aws_attributes.ebs_volume_type":                           {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"},
	"resources.pipelines.*.clusters[*].azure_attributes.availability":                            {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"},
	"resources.pipelines.*.clusters[*].gcp_attributes.availability":                              {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"},
	"resources.pipelines.*.deployment.kind":                                                      {"BUNDLE"},
	"resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.scd_type":  {"SCD_TYPE_1", "SCD_TYPE_2"},
	"resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.scd_type":  {"SCD_TYPE_1", "SCD_TYPE_2"},
	"resources.pipelines.*.ingestion_definition.objects[*].table.table_configuration.scd_type":   {"SCD_TYPE_1", "SCD_TYPE_2"},
	"resources.pipelines.*.ingestion_definition.source_type":                                     {"DYNAMICS365", "GA4_RAW_DATA", "MANAGED_POSTGRESQL", "MYSQL", "NETSUITE", "ORACLE", "POSTGRESQL", "SALESFORCE", "SERVICENOW", "SHAREPOINT", "SQLSERVER", "TERADATA", "WORKDAY_RAAS"},
	"resources.pipelines.*.ingestion_definition.table_configuration.scd_type":                    {"SCD_TYPE_1", "SCD_TYPE_2"},
	"resources.pipelines.*.restart_window.days_of_week[*]":                                       {"FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"},

	"resources.quality_monitors.*.custom_metrics[*].type":        {"CUSTOM_METRIC_TYPE_AGGREGATE", "CUSTOM_METRIC_TYPE_DERIVED", "CUSTOM_METRIC_TYPE_DRIFT"},
	"resources.quality_monitors.*.inference_log.problem_type":    {"PROBLEM_TYPE_CLASSIFICATION", "PROBLEM_TYPE_REGRESSION"},
	"resources.quality_monitors.*.schedule.pause_status":         {"PAUSED", "UNPAUSED"},

	"resources.secret_scopes.*.backend_type": {"AZURE_KEYVAULT", "DATABRICKS"},

	"resources.volumes.*.volume_type": {"EXTERNAL", "MANAGED"},
}
1 change: 0 additions & 1 deletion bundle/internal/validation/generated/required_fields.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading