diff --git a/bundle/internal/schema/main.go b/bundle/internal/schema/main.go
index e0d4d516e0..96a574e8b1 100644
--- a/bundle/internal/schema/main.go
+++ b/bundle/internal/schema/main.go
@@ -7,6 +7,8 @@ import (
 	"os"
 	"path/filepath"
 	"reflect"
+	"slices"
+	"strings"
 
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/resources"
@@ -182,6 +184,68 @@ func removeOutputOnlyFields(typ reflect.Type, s jsonschema.Schema) jsonschema.Sc
 	return s
 }
 
+func enhanceDescriptions(_ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
+	// Add an enum values suffix for string types with enums
+	if s.Type == jsonschema.StringType && len(s.Enum) > 0 {
+		// Skip if the description already contains 2+ enum values. This is a heuristic / best effort to avoid
+		// duplicating information when the description already lists the enum values.
+		enumCount := 0
+		descLower := strings.ToLower(s.Description)
+		for _, v := range s.Enum {
+			if str, ok := v.(string); ok && strings.Contains(descLower, strings.ToLower(str)) {
+				enumCount++
+			}
+		}
+
+		if enumCount < 2 {
+			suffix := formatEnumSuffix(s.Enum)
+			if s.Description == "" {
+				s.Description = suffix
+			} else {
+				s.Description = strings.TrimSuffix(s.Description, ".") + ". " + suffix
+			}
+		}
+	}
+
+	// Add "Required." prefix for object properties that are required
+	if s.Type == jsonschema.ObjectType && s.Properties != nil {
+		for name, prop := range s.Properties {
+			// If the description already contains "required" (case-insensitive), we skip the prefix.
+			// This is a heuristic to prevent malformed descriptions like "Required. Required, my field description".
+			// Adding the prefix is best effort.
+			if slices.Contains(s.Required, name) && !strings.Contains(strings.ToLower(prop.Description), "required") {
+				if prop.Description == "" {
+					prop.Description = "Required."
+				} else {
+					prop.Description = "Required. " + prop.Description
+				}
+			}
+		}
+	}
+
+	return s
+}
+
+func formatEnumSuffix(enum []any) string {
+	strs := make([]string, 0, len(enum))
+	for _, v := range enum {
+		if str, ok := v.(string); ok {
+			strs = append(strs, "`"+str+"`")
+		}
+	}
+
+	if len(strs) == 0 {
+		return ""
+	}
+	if len(strs) == 1 {
+		return "Valid values are: " + strs[0] + "."
+	}
+	if len(strs) == 2 {
+		return "Valid values are: " + strs[0] + " and " + strs[1] + "."
+	}
+	return "Valid values are: " + strings.Join(strs[:len(strs)-1], ", ") + ", and " + strs[len(strs)-1] + "."
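+	// Note (illustrative only, not part of the original change): for string-valued
+	// enum entries, the suffixes produced above look like:
+	//   []any{"READ"}                    -> "Valid values are: `READ`."
+	//   []any{"READ", "WRITE"}           -> "Valid values are: `READ` and `WRITE`."
+	//   []any{"READ", "WRITE", "MANAGE"} -> "Valid values are: `READ`, `WRITE`, and `MANAGE`."
+	// Non-string enum values are skipped, and an all-non-string enum yields an empty suffix.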
+} + func main() { if len(os.Args) != 3 { fmt.Println("Usage: go run main.go ") @@ -227,6 +291,7 @@ func generateSchema(workdir, outputFile string) { makeVolumeTypeOptional, a.addAnnotations, removeOutputOnlyFields, + enhanceDescriptions, addInterpolationPatterns, }) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 5150ab9c9f..ca40ad37e2 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -71,9 +71,11 @@ "$ref": "#/$defs/string" }, "display_name": { + "description": "Required.", "$ref": "#/$defs/string" }, "evaluation": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation" }, "file_path": { @@ -89,6 +91,7 @@ "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AlertPermission" }, "query_text": { + "description": "Required.", "$ref": "#/$defs/string" }, "run_as": { @@ -100,9 +103,11 @@ "deprecated": true }, "schedule": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CronSchedule" }, "warehouse_id": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -130,6 +135,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel" }, "service_principal_name": { @@ -154,6 +160,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_READ`, and `CAN_RUN`.", "enum": [ "CAN_EDIT", "CAN_MANAGE", @@ -196,7 +203,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.\nIt must be unique within the workspace.", + "description": "Required. The name of the app. 
The name must contain only lowercase alphanumeric characters and hyphens.\nIt must be unique within the workspace.", "$ref": "#/$defs/string" }, "permissions": { @@ -207,6 +214,7 @@ "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource" }, "source_code_path": { + "description": "Required.", "$ref": "#/$defs/string" }, "usage_policy_id": { @@ -254,9 +262,11 @@ "type": "object", "properties": { "name": { + "description": "Required.", "$ref": "#/$defs/string" }, "value": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -281,6 +291,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AppPermissionLevel" }, "service_principal_name": { @@ -305,6 +316,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE` and `CAN_USE`.", "enum": [ "CAN_MANAGE", "CAN_USE" @@ -471,6 +483,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ClusterPermissionLevel" }, "service_principal_name": { @@ -495,6 +508,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_RESTART`, and `CAN_ATTACH_TO`.", "enum": [ "CAN_MANAGE", "CAN_RESTART", @@ -592,6 +606,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DashboardPermissionLevel" }, "service_principal_name": { @@ -616,6 +631,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_READ`, `CAN_RUN`, `CAN_EDIT`, and `CAN_MANAGE`.", "enum": [ "CAN_READ", "CAN_RUN", @@ -638,11 +654,11 @@ "$ref": "#/$defs/bool" }, "database_instance_name": { - "description": "The name of the DatabaseInstance housing the database.", + "description": "Required. The name of the DatabaseInstance housing the database.", "$ref": "#/$defs/string" }, "database_name": { - "description": "The name of the database (in a instance) associated with the catalog.", + "description": "Required. The name of the database (in a instance) associated with the catalog.", "$ref": "#/$defs/string" }, "lifecycle": { @@ -650,7 +666,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "The name of the catalog in UC.", + "description": "Required. The name of the catalog in UC.", "$ref": "#/$defs/string" } }, @@ -694,7 +710,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "The name of the instance. This is the unique identifier for the instance.", + "description": "Required. The name of the instance. This is the unique identifier for the instance.", "$ref": "#/$defs/string" }, "node_count": { @@ -741,6 +757,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermissionLevel" }, "service_principal_name": { @@ -765,6 +782,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_CREATE`, `CAN_USE`, and `CAN_MANAGE`.", "enum": [ "CAN_CREATE", "CAN_USE", @@ -783,11 +801,11 @@ "type": "object", "properties": { "principal": { - "description": "The name of the principal that will be granted privileges", + "description": "Required. 
The name of the principal that will be granted privileges", "$ref": "#/$defs/string" }, "privileges": { - "description": "The privileges to grant to the specified entity", + "description": "Required. The privileges to grant to the specified entity", "$ref": "#/$defs/slice/string" } }, @@ -922,6 +940,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.JobPermissionLevel" }, "service_principal_name": { @@ -946,6 +965,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_MANAGE_RUN`, `CAN_VIEW`, and `IS_OWNER`.", "enum": [ "CAN_MANAGE", "CAN_MANAGE_RUN", @@ -991,7 +1011,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "Experiment name.", + "description": "Required. Experiment name.", "$ref": "#/$defs/string" }, "permissions": { @@ -1023,6 +1043,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermissionLevel" }, "service_principal_name": { @@ -1047,6 +1068,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_EDIT`, and `CAN_READ`.", "enum": [ "CAN_MANAGE", "CAN_EDIT", @@ -1073,7 +1095,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "Register models under this name", + "description": "Required. Register models under this name", "$ref": "#/$defs/string" }, "permissions": { @@ -1105,6 +1127,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowModelPermissionLevel" }, "service_principal_name": { @@ -1129,6 +1152,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_EDIT`, `CAN_MANAGE`, `CAN_MANAGE_STAGING_VERSIONS`, `CAN_MANAGE_PRODUCTION_VERSIONS`, and `CAN_READ`.", "enum": [ "CAN_EDIT", "CAN_MANAGE", @@ -1214,6 +1238,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermissionLevel" }, "service_principal_name": { @@ -1238,6 +1263,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_QUERY`, and `CAN_VIEW`.", "enum": [ "CAN_MANAGE", "CAN_QUERY", @@ -1260,7 +1286,7 @@ "$ref": "#/$defs/string" }, "level": { - "description": "The allowed permission for user, group, service principal defined for this permission.", + "description": "Required. The allowed permission for user, group, service principal defined for this permission.", "$ref": "#/$defs/string" }, "service_principal_name": { @@ -1439,6 +1465,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.PipelinePermissionLevel" }, "service_principal_name": { @@ -1463,6 +1490,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `IS_OWNER`, `CAN_RUN`, and `CAN_VIEW`.", "enum": [ "CAN_MANAGE", "IS_OWNER", @@ -1482,7 +1510,7 @@ "type": "object", "properties": { "assets_dir": { - "description": "[Create:REQ Update:IGN] Field for specifying the absolute path to a custom directory to store data-monitoring\nassets. Normally prepopulated to a default user location via UI and Python APIs.", + "description": "Required. 
[Create:REQ Update:IGN] Field for specifying the absolute path to a custom directory to store data-monitoring\nassets. Normally prepopulated to a default user location via UI and Python APIs.", "$ref": "#/$defs/string" }, "baseline_table_name": { @@ -1515,7 +1543,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications" }, "output_schema_name": { - "description": "[Create:REQ Update:REQ] Schema where output tables are created. Needs to be in 2-level format {catalog}.{schema}", + "description": "Required. [Create:REQ Update:REQ] Schema where output tables are created. Needs to be in 2-level format {catalog}.{schema}", "$ref": "#/$defs/string" }, "schedule": { @@ -1535,6 +1563,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot" }, "table_name": { + "description": "Required.", "$ref": "#/$defs/string" }, "time_series": { @@ -1635,7 +1664,7 @@ "type": "object", "properties": { "catalog_name": { - "description": "Name of parent catalog.", + "description": "Required. Name of parent catalog.", "$ref": "#/$defs/string" }, "comment": { @@ -1650,7 +1679,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "Name of schema, relative to parent catalog.", + "description": "Required. Name of schema, relative to parent catalog.", "$ref": "#/$defs/string" }, "properties": { @@ -1680,9 +1709,11 @@ "type": "object", "properties": { "principal": { + "description": "Required.", "$ref": "#/$defs/string" }, "privileges": { + "description": "Required.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrantPrivilege" } }, @@ -1702,6 +1733,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ALL_PRIVILEGES`, `APPLY_TAG`, `CREATE_FUNCTION`, `CREATE_TABLE`, `CREATE_VOLUME`, `MANAGE`, `USE_SCHEMA`, `EXECUTE`, `MODIFY`, `REFRESH`, `SELECT`, `READ_VOLUME`, and `WRITE_VOLUME`.", "enum": [ "ALL_PRIVILEGES", "APPLY_TAG", @@ -1742,7 +1774,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "Scope name requested by the user. Scope names are unique.", + "description": "Required. Scope name requested by the user. Scope names are unique.", "$ref": "#/$defs/string" }, "permissions": { @@ -1771,7 +1803,7 @@ "$ref": "#/$defs/string" }, "level": { - "description": "The allowed permission for user, group, service principal defined for this permission.", + "description": "Required. 
The allowed permission for user, group, service principal defined for this permission.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel" }, "service_principal_name": { @@ -1798,6 +1830,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `READ`, `WRITE`, and `MANAGE`.", "enum": [ "READ", "WRITE", @@ -1893,6 +1926,7 @@ "$ref": "#/$defs/string" }, "level": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermissionLevel" }, "service_principal_name": { @@ -1917,6 +1951,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_USE`, `CAN_MONITOR`, and `CAN_VIEW`.", "enum": [ "CAN_MANAGE", "CAN_USE", @@ -1946,6 +1981,7 @@ "$ref": "#/$defs/string" }, "name": { + "description": "Required.", "$ref": "#/$defs/string" }, "spec": { @@ -1969,7 +2005,7 @@ "type": "object", "properties": { "catalog_name": { - "description": "The name of the catalog where the schema and the volume are", + "description": "Required. The name of the catalog where the schema and the volume are", "$ref": "#/$defs/string" }, "comment": { @@ -1984,11 +2020,11 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "name": { - "description": "The name of the volume", + "description": "Required. The name of the volume", "$ref": "#/$defs/string" }, "schema_name": { - "description": "The name of the schema where the volume is", + "description": "Required. The name of the schema where the volume is", "$ref": "#/$defs/string" }, "storage_location": { @@ -2019,9 +2055,11 @@ "type": "object", "properties": { "principal": { + "description": "Required.", "$ref": "#/$defs/string" }, "privileges": { + "description": "Required.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrantPrivilege" } }, @@ -2041,6 +2079,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ALL_PRIVILEGES`, `APPLY_TAG`, `MANAGE`, `READ_VOLUME`, and `WRITE_VOLUME`.", "enum": [ "ALL_PRIVILEGES", "APPLY_TAG", @@ -2266,7 +2305,7 @@ "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)." }, "name": { - "description": "The name of the bundle.", + "description": "Required. The name of the bundle.", "$ref": "#/$defs/string" }, "uuid": { @@ -2591,6 +2630,7 @@ "type": "object", "properties": { "content": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -2850,6 +2890,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `SNAPSHOT` and `AUTO_SYNC`.", "enum": [ "SNAPSHOT", "AUTO_SYNC" @@ -2865,6 +2906,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `SUCCEEDED`, `FAILED`, `IN_PROGRESS`, and `CANCELLED`.", "enum": [ "SUCCEEDED", "FAILED", @@ -2912,7 +2954,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob" }, "name": { - "description": "Name of the App Resource.", + "description": "Required. 
Name of the App Resource.", "$ref": "#/$defs/string" }, "secret": { @@ -2945,12 +2987,15 @@ "type": "object", "properties": { "database_name": { + "description": "Required.", "$ref": "#/$defs/string" }, "instance_name": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission" } }, @@ -2971,6 +3016,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_CONNECT_AND_CREATE`.", "enum": [ "CAN_CONNECT_AND_CREATE" ] @@ -2987,9 +3033,11 @@ "type": "object", "properties": { "experiment_id": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceExperimentExperimentPermission" } }, @@ -3009,6 +3057,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_EDIT`, and `CAN_READ`.", "enum": [ "CAN_MANAGE", "CAN_EDIT", @@ -3027,12 +3076,15 @@ "type": "object", "properties": { "name": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpaceGenieSpacePermission" }, "space_id": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -3053,6 +3105,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_EDIT`, `CAN_RUN`, and `CAN_VIEW`.", "enum": [ "CAN_MANAGE", "CAN_EDIT", @@ -3072,9 +3125,11 @@ "type": "object", "properties": { "id": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission" } }, @@ -3094,6 +3149,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `IS_OWNER`, `CAN_MANAGE_RUN`, and `CAN_VIEW`.", "enum": [ "CAN_MANAGE", "IS_OWNER", @@ -3113,12 +3169,15 @@ "type": "object", "properties": { "key": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission" }, "scope": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -3158,9 +3217,11 @@ "type": "object", "properties": { "name": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission" } }, @@ -3180,6 +3241,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_QUERY`, and `CAN_VIEW`.", "enum": [ "CAN_MANAGE", "CAN_QUERY", @@ -3198,9 +3260,11 @@ "type": "object", "properties": { "id": { + "description": "Required.", "$ref": "#/$defs/string" }, "permission": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission" } }, @@ -3220,6 +3284,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CAN_MANAGE`, `CAN_USE`, and `IS_OWNER`.", "enum": [ "CAN_MANAGE", "CAN_USE", @@ -3238,12 +3303,15 @@ "type": "object", "properties": { "permission": { + "description": "Required.", "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurablePermission" }, "securable_full_name": { + "description": "Required.", "$ref": "#/$defs/string" }, "securable_type": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurableType" } }, @@ -3264,6 +3332,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `READ_VOLUME`, `WRITE_VOLUME`, `SELECT`, `EXECUTE`, and `USE_CONNECTION`.", "enum": [ "READ_VOLUME", "WRITE_VOLUME", @@ -3282,6 +3351,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `VOLUME`, `TABLE`, `FUNCTION`, and `CONNECTION`.", "enum": [ "VOLUME", "TABLE", @@ -3299,6 +3369,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `DEPLOYING`, `RUNNING`, `CRASHED`, and `UNAVAILABLE`.", "enum": [ "DEPLOYING", "RUNNING", @@ -3328,6 +3399,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `MEDIUM` and `LARGE`.", "enum": [ "MEDIUM", "LARGE" @@ -3343,6 +3415,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ERROR`, `DELETING`, `STARTING`, `STOPPING`, `UPDATING`, `STOPPED`, and `ACTIVE`.", "enum": [ "ERROR", "DELETING", @@ -3404,11 +3477,11 @@ "description": "Git repository configuration specifying the location of the repository.", "properties": { "provider": { - "description": "Git provider. Case insensitive. Supported values: gitHub, gitHubEnterprise, bitbucketCloud,\nbitbucketServer, azureDevOpsServices, gitLab, gitLabEnterpriseEdition, awsCodeCommit.", + "description": "Required. Git provider. Case insensitive. Supported values: gitHub, gitHubEnterprise, bitbucketCloud,\nbitbucketServer, azureDevOpsServices, gitLab, gitLabEnterpriseEdition, awsCodeCommit.", "$ref": "#/$defs/string" }, "url": { - "description": "URL of the Git repository.", + "description": "Required. URL of the Git repository.", "$ref": "#/$defs/string" } }, @@ -3465,11 +3538,11 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus" }, "quartz_cron_expression": { - "description": "The expression that determines when to run the monitor. See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).", + "description": "Required. The expression that determines when to run the monitor. See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).", "$ref": "#/$defs/string" }, "timezone_id": { - "description": "The timezone id (e.g., ``PST``) in which to evaluate the quartz expression.", + "description": "Required. The timezone id (e.g., ``PST``) in which to evaluate the quartz expression.", "$ref": "#/$defs/string" } }, @@ -3489,7 +3562,7 @@ "oneOf": [ { "type": "string", - "description": "Source link: https://src.dev.databricks.com/databricks/universe/-/blob/elastic-spark-common/api/messages/schedule.proto\nMonitoring workflow schedule pause status.", + "description": "Source link: https://src.dev.databricks.com/databricks/universe/-/blob/elastic-spark-common/api/messages/schedule.proto\nMonitoring workflow schedule pause status. Valid values are: `UNSPECIFIED`, `UNPAUSED`, and `PAUSED`.", "enum": [ "UNSPECIFIED", "UNPAUSED", @@ -3545,7 +3618,7 @@ "type": "object", "properties": { "granularities": { - "description": "Granularities for aggregating data into time windows based on their timestamp. 
Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", + "description": "Required. Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", "$ref": "#/$defs/slice/string" }, "label_col": { @@ -3553,11 +3626,11 @@ "$ref": "#/$defs/string" }, "model_id_col": { - "description": "Column for the model identifier.", + "description": "Required. Column for the model identifier.", "$ref": "#/$defs/string" }, "prediction_col": { - "description": "Column for the prediction.", + "description": "Required. Column for the prediction.", "$ref": "#/$defs/string" }, "prediction_proba_col": { @@ -3565,11 +3638,11 @@ "$ref": "#/$defs/string" }, "problem_type": { - "description": "Problem type the model aims to solve.", + "description": "Required. Problem type the model aims to solve.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType" }, "timestamp_col": { - "description": "Column for the timestamp.", + "description": "Required. Column for the timestamp.", "$ref": "#/$defs/string" } }, @@ -3592,6 +3665,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `PROBLEM_TYPE_CLASSIFICATION` and `PROBLEM_TYPE_REGRESSION`.", "enum": [ "PROBLEM_TYPE_CLASSIFICATION", "PROBLEM_TYPE_REGRESSION" @@ -3610,23 +3684,23 @@ "description": "Custom metric definition.", "properties": { "definition": { - "description": "Jinja template for a SQL expression that specifies how to compute the metric. See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).", + "description": "Required. Jinja template for a SQL expression that specifies how to compute the metric. See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).", "$ref": "#/$defs/string" }, "input_columns": { - "description": "A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.", + "description": "Required. A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.", "$ref": "#/$defs/slice/string" }, "name": { - "description": "Name of the metric in the output tables.", + "description": "Required. Name of the metric in the output tables.", "$ref": "#/$defs/string" }, "output_data_type": { - "description": "The output type of the custom metric.", + "description": "Required. The output type of the custom metric.", "$ref": "#/$defs/string" }, "type": { - "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics", + "description": "Required. 
Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType" } }, @@ -3706,11 +3780,11 @@ "description": "Time series analysis configuration.", "properties": { "granularities": { - "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", + "description": "Required. Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", "$ref": "#/$defs/slice/string" }, "timestamp_col": { - "description": "Column for the timestamp.", + "description": "Required. Column for the timestamp.", "$ref": "#/$defs/string" } }, @@ -3764,6 +3838,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `MANAGED` and `EXTERNAL`.", "enum": [ "MANAGED", "EXTERNAL" @@ -3782,7 +3857,7 @@ "description": "A storage location in Adls Gen2", "properties": { "destination": { - "description": "abfss destination, e.g. `abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.", + "description": "Required. abfss destination, e.g. `abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.", "$ref": "#/$defs/string" } }, @@ -3876,7 +3951,7 @@ "oneOf": [ { "type": "string", - "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\n\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster.", + "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\n\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster. Valid values are: `SPOT`, `ON_DEMAND`, and `SPOT_WITH_FALLBACK`.", "enum": [ "SPOT", "ON_DEMAND", @@ -3923,7 +3998,7 @@ "oneOf": [ { "type": "string", - "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster.", + "description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster. Valid values are: `SPOT_AZURE`, `ON_DEMAND_AZURE`, and `SPOT_WITH_FALLBACK_AZURE`.", "enum": [ "SPOT_AZURE", "ON_DEMAND_AZURE", @@ -4154,7 +4229,7 @@ "description": "A storage location in DBFS", "properties": { "destination": { - "description": "dbfs destination, e.g. `dbfs:/my/path`", + "description": "Required. dbfs destination, e.g. 
`dbfs:/my/path`", "$ref": "#/$defs/string" } }, @@ -4216,7 +4291,7 @@ "oneOf": [ { "type": "string", - "description": "All EBS volume types that Databricks supports.\nSee https://aws.amazon.com/ebs/details/ for details.", + "description": "All EBS volume types that Databricks supports.\nSee https://aws.amazon.com/ebs/details/ for details. Valid values are: `GENERAL_PURPOSE_SSD` and `THROUGHPUT_OPTIMIZED_HDD`.", "enum": [ "GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD" @@ -4308,7 +4383,7 @@ "oneOf": [ { "type": "string", - "description": "This field determines whether the instance pool will contain preemptible\nVMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.", + "description": "This field determines whether the instance pool will contain preemptible\nVMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable. Valid values are: `PREEMPTIBLE_GCP`, `ON_DEMAND_GCP`, and `PREEMPTIBLE_WITH_FALLBACK_GCP`.", "enum": [ "PREEMPTIBLE_GCP", "ON_DEMAND_GCP", @@ -4328,7 +4403,7 @@ "description": "A storage location in Google Cloud Platform's GCS", "properties": { "destination": { - "description": "GCS destination/URI, e.g. `gs://my-bucket/some-prefix`", + "description": "Required. GCS destination/URI, e.g. `gs://my-bucket/some-prefix`", "$ref": "#/$defs/string" } }, @@ -4392,6 +4467,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CLASSIC_PREVIEW`.", "enum": [ "CLASSIC_PREVIEW" ] @@ -4452,7 +4528,7 @@ "type": "object", "properties": { "destination": { - "description": "local file destination, e.g. `file:/my/local/file.sh`", + "description": "Required. local file destination, e.g. `file:/my/local/file.sh`", "$ref": "#/$defs/string" } }, @@ -4495,7 +4571,7 @@ "type": "object", "properties": { "coordinates": { - "description": "Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".", + "description": "Required. Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".", "$ref": "#/$defs/string" }, "exclusions": { @@ -4524,7 +4600,7 @@ "type": "object", "properties": { "package": { - "description": "The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".", + "description": "Required. The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".", "$ref": "#/$defs/string" }, "repo": { @@ -4549,7 +4625,7 @@ "type": "object", "properties": { "package": { - "description": "The name of the CRAN package to install.", + "description": "Required. The name of the CRAN package to install.", "$ref": "#/$defs/string" }, "repo": { @@ -4572,6 +4648,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `NULL`, `STANDARD`, and `PHOTON`.", "enum": [ "NULL", "STANDARD", @@ -4595,7 +4672,7 @@ "$ref": "#/$defs/string" }, "destination": { - "description": "S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. Please also note that you cannot use AWS keys to deliver logs.", + "description": "Required. S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. 
Please also note that you cannot use AWS keys to deliver logs.", "$ref": "#/$defs/string" }, "enable_encryption": { @@ -4637,7 +4714,7 @@ "description": "A storage location back by UC Volumes.", "properties": { "destination": { - "description": "UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`\nor `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`", + "description": "Required. UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`\nor `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`", "$ref": "#/$defs/string" } }, @@ -4659,7 +4736,7 @@ "description": "Cluster Attributes showing for clusters workload types.", "properties": { "clients": { - "description": "defined what type of clients can use the cluster. E.g. Notebooks, Jobs", + "description": "Required. defined what type of clients can use the cluster. E.g. Notebooks, Jobs", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes" } }, @@ -4681,7 +4758,7 @@ "description": "A storage location in Workspace Filesystem (WSFS)", "properties": { "destination": { - "description": "wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`", + "description": "Required. wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`", "$ref": "#/$defs/string" } }, @@ -4700,6 +4777,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ACTIVE` and `TRASHED`.", "enum": [ "ACTIVE", "TRASHED" @@ -4764,6 +4842,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `STARTING`, `AVAILABLE`, `DELETING`, `STOPPED`, `UPDATING`, and `FAILING_OVER`.", "enum": [ "STARTING", "AVAILABLE", @@ -4822,6 +4901,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `PROVISIONING`, `ACTIVE`, `FAILED`, `DELETING`, `UPDATING`, and `DEGRADED`.", "enum": [ "PROVISIONING", "ACTIVE", @@ -4841,6 +4921,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `PROVISIONING_PHASE_MAIN`, `PROVISIONING_PHASE_INDEX_SCAN`, and `PROVISIONING_PHASE_INDEX_SORT`.", "enum": [ "PROVISIONING_PHASE_MAIN", "PROVISIONING_PHASE_INDEX_SCAN", @@ -4921,6 +5002,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CONTINUOUS`, `TRIGGERED`, and `SNAPSHOT`.", "enum": [ "CONTINUOUS", "TRIGGERED", @@ -4980,7 +5062,7 @@ "oneOf": [ { "type": "string", - "description": "The state of a synced table.", + "description": "The state of a synced table. Valid values are: `SYNCED_TABLE_PROVISIONING`, `SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES`, `SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT`, `SYNCED_TABLE_ONLINE`, `SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE`, `SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE`, `SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE`, `SYNCED_TABLED_OFFLINE`, `SYNCED_TABLE_OFFLINE_FAILED`, `SYNCED_TABLE_ONLINE_PIPELINE_FAILED`, and `SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES`.", "enum": [ "SYNCED_TABLE_PROVISIONING", "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES", @@ -5049,6 +5131,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `OAUTH` and `PAT`.", "enum": [ "OAUTH", "PAT" @@ -5067,7 +5150,7 @@ "description": "Clean Rooms notebook task for V1 Clean Room service (GA).\nReplaces the deprecated CleanRoomNotebookTask (defined above) which was for V0 service.", "properties": { "clean_room_name": { - "description": "The clean room that the notebook belongs to.", + "description": "Required. 
The clean room that the notebook belongs to.", "$ref": "#/$defs/string" }, "etag": { @@ -5079,7 +5162,7 @@ "$ref": "#/$defs/map/string" }, "notebook_name": { - "description": "Name of the notebook being run.", + "description": "Required. Name of the notebook being run.", "$ref": "#/$defs/string" } }, @@ -5109,7 +5192,7 @@ "$ref": "#/$defs/string" }, "num_gpus": { - "description": "Number of GPUs.", + "description": "Required. Number of GPUs.", "$ref": "#/$defs/int" } }, @@ -5128,6 +5211,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ANY_UPDATED` and `ALL_UPDATED`.", "enum": [ "ANY_UPDATED", "ALL_UPDATED" @@ -5145,15 +5229,15 @@ "type": "object", "properties": { "left": { - "description": "The left operand of the condition task. Can be either a string value or a job state or parameter reference.", + "description": "Required. The left operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string" }, "op": { - "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", + "description": "Required. * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp" }, "right": { - "description": "The right operand of the condition task. Can be either a string value or a job state or parameter reference.", + "description": "Required. The right operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string" } }, @@ -5322,7 +5406,7 @@ "$ref": "#/$defs/string" }, "commands": { - "description": "A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.", + "description": "Required. A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.", "$ref": "#/$defs/slice/string" }, "profiles_directory": { @@ -5367,7 +5451,7 @@ "$ref": "#/$defs/int" }, "url": { - "description": "URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.", + "description": "Required. URL to be monitored for file arrivals. 
The path must point to the root or a subpath of the external location.", "$ref": "#/$defs/string" }, "wait_after_last_change_seconds": { @@ -5396,11 +5480,11 @@ "$ref": "#/$defs/int" }, "inputs": { - "description": "Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.", + "description": "Required. Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.", "$ref": "#/$defs/string" }, "task": { - "description": "Configuration for the task that will be run for each element in the array", + "description": "Required. Configuration for the task that will be run for each element in the array", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task" } }, @@ -5420,6 +5504,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `SINGLE_TASK` and `MULTI_TASK`.", "enum": [ "SINGLE_TASK", "MULTI_TASK" @@ -5444,7 +5529,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig" }, "dl_runtime_image": { - "description": "Runtime image", + "description": "Required. Runtime image", "$ref": "#/$defs/string" }, "mlflow_experiment_name": { @@ -5483,6 +5568,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `gitHub`, `bitbucketCloud`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLab`, `gitLabEnterpriseEdition`, and `awsCodeCommit`.", "enum": [ "gitHub", "bitbucketCloud", @@ -5534,7 +5620,7 @@ "$ref": "#/$defs/string" }, "git_provider": { - "description": "Unique identifier of the service used to host the Git repository. The value is case insensitive.", + "description": "Required. Unique identifier of the service used to host the Git repository. The value is case insensitive.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider" }, "git_tag": { @@ -5542,7 +5628,7 @@ "$ref": "#/$defs/string" }, "git_url": { - "description": "URL of the repository to be cloned by this job.", + "description": "Required. URL of the repository to be cloned by this job.", "$ref": "#/$defs/string" } }, @@ -5568,7 +5654,7 @@ "$ref": "#/$defs/string" }, "new_cluster": { - "description": "If new_cluster, a description of a cluster that is created for each task.", + "description": "Required. If new_cluster, a description of a cluster that is created for each task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec" } }, @@ -5590,7 +5676,7 @@ "type": "object", "properties": { "kind": { - "description": "The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.", + "description": "Required. The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind" }, "metadata_file_path": { @@ -5613,7 +5699,7 @@ "oneOf": [ { "type": "string", - "description": "* `BUNDLE`: The job is managed by Databricks Asset Bundle.", + "description": "* `BUNDLE`: The job is managed by Databricks Asset Bundle. Valid values are: `BUNDLE`.", "enum": [ "BUNDLE" ] @@ -5686,7 +5772,7 @@ "type": "object", "properties": { "environment_key": { - "description": "The key of an environment. It has to be unique within a job.", + "description": "Required. The key of an environment. 
It has to be unique within a job.", "$ref": "#/$defs/string" }, "spec": { @@ -5732,11 +5818,11 @@ "type": "object", "properties": { "default": { - "description": "Default value of the parameter.", + "description": "Required. Default value of the parameter.", "$ref": "#/$defs/string" }, "name": { - "description": "The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`", + "description": "Required. The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`", "$ref": "#/$defs/string" } }, @@ -5792,11 +5878,11 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState" }, "import_from_git_branch": { - "description": "Name of the branch which the job is imported from.", + "description": "Required. Name of the branch which the job is imported from.", "$ref": "#/$defs/string" }, "job_config_path": { - "description": "Path of the job YAML file that contains the job specification.", + "description": "Required. Path of the job YAML file that contains the job specification.", "$ref": "#/$defs/string" } }, @@ -5851,7 +5937,7 @@ "oneOf": [ { "type": "string", - "description": "Specifies the operator used to compare the health metric value with the specified threshold.", + "description": "Specifies the operator used to compare the health metric value with the specified threshold. Valid values are: `GREATER_THAN`.", "enum": [ "GREATER_THAN" ] @@ -5868,13 +5954,15 @@ "type": "object", "properties": { "metric": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric" }, "op": { + "description": "Required.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator" }, "value": { - "description": "Specifies the threshold value that the health metric should obey to satisfy the health rule.", + "description": "Required. Specifies the threshold value that the health metric should obey to satisfy the health rule.", "$ref": "#/$defs/int64" } }, @@ -5919,7 +6007,7 @@ "$ref": "#/$defs/slice/string" }, "condition": { - "description": "The condition based on which to trigger a job run.", + "description": "Required. The condition based on which to trigger a job run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfigurationCondition" }, "min_time_between_triggers_seconds": { @@ -5950,6 +6038,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `MODEL_CREATED`, `MODEL_VERSION_READY`, and `MODEL_ALIAS_SET`.", "enum": [ "MODEL_CREATED", "MODEL_VERSION_READY", @@ -5999,6 +6088,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `UNPAUSED` and `PAUSED`.", "enum": [ "UNPAUSED", "PAUSED" @@ -6014,7 +6104,7 @@ "oneOf": [ { "type": "string", - "description": "PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be.\nThe performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager\n(see cluster-common PerformanceTarget).", + "description": "PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be.\nThe performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager\n(see cluster-common PerformanceTarget). 
Valid values are: `PERFORMANCE_OPTIMIZED` and `STANDARD`.", "enum": [ "PERFORMANCE_OPTIMIZED", "STANDARD" @@ -6032,11 +6122,11 @@ "type": "object", "properties": { "interval": { - "description": "The interval at which the trigger should run.", + "description": "Required. The interval at which the trigger should run.", "$ref": "#/$defs/int" }, "unit": { - "description": "The unit of time for the interval.", + "description": "Required. The unit of time for the interval.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit" } }, @@ -6056,6 +6146,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `HOURS`, `DAYS`, and `WEEKS`.", "enum": [ "HOURS", "DAYS", @@ -6096,7 +6187,7 @@ "$ref": "#/$defs/bool" }, "pipeline_id": { - "description": "The full name of the pipeline task to execute.", + "description": "Required. The full name of the pipeline task to execute.", "$ref": "#/$defs/string" } }, @@ -6215,7 +6306,7 @@ "type": "object", "properties": { "entry_point": { - "description": "Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`", + "description": "Required. Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`", "$ref": "#/$defs/string" }, "named_parameters": { @@ -6223,7 +6314,7 @@ "$ref": "#/$defs/map/string" }, "package_name": { - "description": "Name of the package to execute", + "description": "Required. Name of the package to execute", "$ref": "#/$defs/string" }, "parameters": { @@ -6306,7 +6397,7 @@ "deprecated": true }, "job_id": { - "description": "ID of the job to trigger.", + "description": "Required. ID of the job to trigger.", "$ref": "#/$defs/int64" }, "job_parameters": { @@ -6491,7 +6582,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery" }, "warehouse_id": { - "description": "The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.", + "description": "Required. The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.", "$ref": "#/$defs/string" } }, @@ -6512,7 +6603,7 @@ "type": "object", "properties": { "alert_id": { - "description": "The canonical identifier of the SQL alert.", + "description": "Required. The canonical identifier of the SQL alert.", "$ref": "#/$defs/string" }, "pause_subscriptions": { @@ -6545,7 +6636,7 @@ "$ref": "#/$defs/string" }, "dashboard_id": { - "description": "The canonical identifier of the SQL dashboard.", + "description": "Required. The canonical identifier of the SQL dashboard.", "$ref": "#/$defs/string" }, "pause_subscriptions": { @@ -6574,7 +6665,7 @@ "type": "object", "properties": { "path": { - "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.", + "description": "Required. Path of the SQL file. 
Must be relative if the source is a remote Git repository and absolute for workspace paths.", "$ref": "#/$defs/string" }, "source": { @@ -6599,7 +6690,7 @@ "type": "object", "properties": { "query_id": { - "description": "The canonical identifier of the SQL query.", + "description": "Required. The canonical identifier of the SQL query.", "$ref": "#/$defs/string" } }, @@ -6640,6 +6731,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `DIRECT_QUERY`, `IMPORT`, and `DUAL`.", "enum": [ "DIRECT_QUERY", "IMPORT", @@ -6711,7 +6803,7 @@ "$ref": "#/$defs/int" }, "table_names": { - "description": "A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.", + "description": "Required. A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.", "$ref": "#/$defs/slice/string" }, "wait_after_last_change_seconds": { @@ -6910,7 +7002,7 @@ "$ref": "#/$defs/string" }, "task_key": { - "description": "The name of the task this task depends on.", + "description": "Required. The name of the task this task depends on.", "$ref": "#/$defs/string" } }, @@ -7047,6 +7139,7 @@ "type": "object", "properties": { "id": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -7185,7 +7278,7 @@ "oneOf": [ { "type": "string", - "description": "Days of week in which the window is allowed to happen.\nIf not specified all days of the week will be used.", + "description": "Days of week in which the window is allowed to happen.\nIf not specified all days of the week will be used. Valid values are: `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.", "enum": [ "MONDAY", "TUESDAY", @@ -7206,7 +7299,7 @@ "oneOf": [ { "type": "string", - "description": "The deployment method that manages the pipeline:\n- BUNDLE: The pipeline is managed by a Databricks Asset Bundle.", + "description": "The deployment method that manages the pipeline:\n- BUNDLE: The pipeline is managed by a Databricks Asset Bundle. Valid values are: `BUNDLE`.", "enum": [ "BUNDLE" ] @@ -7322,7 +7415,7 @@ "deprecated": true }, "connection_name": { - "description": "Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", + "description": "Required. Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string" }, "connection_parameters": { @@ -7491,6 +7584,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `MYSQL`, `POSTGRESQL`, `SQLSERVER`, `SALESFORCE`, `BIGQUERY`, `NETSUITE`, `WORKDAY_RAAS`, `GA4_RAW_DATA`, `SERVICENOW`, `MANAGED_POSTGRESQL`, `ORACLE`, `TERADATA`, `SHAREPOINT`, `DYNAMICS365`, and `FOREIGN_CATALOG`.", "enum": [ "MYSQL", "POSTGRESQL", @@ -7681,11 +7775,11 @@ "type": "object", "properties": { "max_workers": { - "description": "The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.", + "description": "Required. The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.", "$ref": "#/$defs/int" }, "min_workers": { - "description": "The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", + "description": "Required. 
The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", "$ref": "#/$defs/int" }, "mode": { @@ -7727,7 +7821,7 @@ "type": "object", "properties": { "kind": { - "description": "The deployment method that manages the pipeline.", + "description": "Required. The deployment method that manages the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind" }, "metadata_file_path": { @@ -7920,7 +8014,7 @@ "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek" }, "start_hour": { - "description": "An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour.", + "description": "Required. An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour.", "$ref": "#/$defs/int" }, "time_zone_id": { @@ -8152,7 +8246,7 @@ "oneOf": [ { "type": "string", - "description": "The SCD type to use to ingest the table.", + "description": "The SCD type to use to ingest the table. Valid values are: `SCD_TYPE_1`, `SCD_TYPE_2`, and `APPEND_ONLY`.", "enum": [ "SCD_TYPE_1", "SCD_TYPE_2", @@ -8277,6 +8371,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `NONE`, `BLOCK`, and `MASK`.", "enum": [ "NONE", "BLOCK", @@ -8359,7 +8454,7 @@ "$ref": "#/$defs/string" }, "renewal_period": { - "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", + "description": "Required. Renewal period field for a rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod" }, "tokens": { @@ -8382,6 +8477,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `user`, `endpoint`, `user_group`, and `service_principal`.", "enum": [ "user", "endpoint", @@ -8399,6 +8495,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `minute`.", "enum": [ "minute" ] @@ -8441,7 +8538,7 @@ "$ref": "#/$defs/string" }, "aws_region": { - "description": "The AWS region to use. Bedrock has to be enabled there.", + "description": "Required. The AWS region to use. Bedrock has to be enabled there.", "$ref": "#/$defs/string" }, "aws_secret_access_key": { @@ -8453,7 +8550,7 @@ "$ref": "#/$defs/string" }, "bedrock_provider": { - "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", + "description": "Required. The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider" }, "instance_profile_arn": { @@ -8477,6 +8574,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `anthropic`, `cohere`, `ai21labs`, and `amazon`.", "enum": [ "anthropic", "cohere", @@ -8518,7 +8616,7 @@ "type": "object", "properties": { "key": { - "description": "The name of the API key parameter used for authentication.", + "description": "Required. 
The name of the API key parameter used for authentication.", "$ref": "#/$defs/string" }, "value": { @@ -8634,7 +8732,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth" }, "custom_provider_url": { - "description": "This is a field to provide the URL of the custom provider API.", + "description": "Required. This is a field to provide the URL of the custom provider API.", "$ref": "#/$defs/string" } }, @@ -8663,7 +8761,7 @@ "$ref": "#/$defs/string" }, "databricks_workspace_url": { - "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", + "description": "Required. The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", "$ref": "#/$defs/string" } }, @@ -8736,7 +8834,7 @@ "type": "object", "properties": { "key": { - "description": "Key field for a serving endpoint tag.", + "description": "Required. Key field for a serving endpoint tag.", "$ref": "#/$defs/string" }, "value": { @@ -8789,7 +8887,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig" }, "name": { - "description": "The name of the external model.", + "description": "Required. The name of the external model.", "$ref": "#/$defs/string" }, "openai_config": { @@ -8801,11 +8899,11 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig" }, "provider": { - "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.", + "description": "Required. The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider" }, "task": { - "description": "The task type of the external model.", + "description": "Required. The task type of the external model.", "$ref": "#/$defs/string" } }, @@ -8826,6 +8924,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ai21labs`, `anthropic`, `amazon-bedrock`, `cohere`, `databricks-model-serving`, `google-cloud-vertex-ai`, `openai`, `palm`, and `custom`.", "enum": [ "ai21labs", "anthropic", @@ -8850,7 +8949,7 @@ "type": "object", "properties": { "enabled": { - "description": "Whether to enable traffic fallback. When a served entity in the serving endpoint returns specific error\ncodes (e.g. 500), the request will automatically be round-robin attempted with other served entities in the same\nendpoint, following the order of served entity list, until a successful response is returned.\nIf all attempts fail, return the last response with the error code.", + "description": "Required. Whether to enable traffic fallback. When a served entity in the serving endpoint returns specific error\ncodes (e.g. 
500), the request will automatically be round-robin attempted with other served entities in the same\nendpoint, following the order of served entity list, until a successful response is returned.\nIf all attempts fail, return the last response with the error code.", "$ref": "#/$defs/bool" } }, @@ -8879,11 +8978,11 @@ "$ref": "#/$defs/string" }, "project_id": { - "description": "This is the Google Cloud project id that the service account is\nassociated with.", + "description": "Required. This is the Google Cloud project id that the service account is\nassociated with.", "$ref": "#/$defs/string" }, "region": { - "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", + "description": "Required. This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", "$ref": "#/$defs/string" } }, @@ -8986,7 +9085,7 @@ "type": "object", "properties": { "calls": { - "description": "Used to specify how many calls are allowed for a key within the renewal_period.", + "description": "Required. Used to specify how many calls are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64" }, "key": { @@ -8994,7 +9093,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey" }, "renewal_period": { - "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", + "description": "Required. Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod" } }, @@ -9014,6 +9113,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `user` and `endpoint`.", "enum": [ "user", "endpoint" @@ -9029,6 +9129,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `minute`.", "enum": [ "minute" ] @@ -9052,7 +9153,7 @@ "$ref": "#/$defs/string" }, "traffic_percentage": { - "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.", + "description": "Required. The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.", "$ref": "#/$defs/int" } }, @@ -9166,9 +9267,11 @@ "$ref": "#/$defs/int" }, "model_name": { + "description": "Required.", "$ref": "#/$defs/string" }, "model_version": { + "description": "Required.", "$ref": "#/$defs/string" }, "name": { @@ -9180,7 +9283,7 @@ "$ref": "#/$defs/int64" }, "scale_to_zero_enabled": { - "description": "Whether the compute resources for the served entity should scale down to zero.", + "description": "Required. Whether the compute resources for the served entity should scale down to zero.", "$ref": "#/$defs/bool" }, "workload_size": { @@ -9209,7 +9312,7 @@ "oneOf": [ { "type": "string", - "description": "Please keep this in sync with with workload types in InferenceEndpointEntities.scala", + "description": "Please keep this in sync with with workload types in InferenceEndpointEntities.scala. 
Valid values are: `CPU`, `GPU_MEDIUM`, `GPU_SMALL`, `GPU_LARGE`, and `MULTIGPU_MEDIUM`.", "enum": [ "CPU", "GPU_MEDIUM", @@ -9228,7 +9331,7 @@ "oneOf": [ { "type": "string", - "description": "Please keep this in sync with with workload types in InferenceEndpointEntities.scala", + "description": "Please keep this in sync with with workload types in InferenceEndpointEntities.scala. Valid values are: `CPU`, `GPU_MEDIUM`, `GPU_SMALL`, `GPU_LARGE`, and `MULTIGPU_MEDIUM`.", "enum": [ "CPU", "GPU_MEDIUM", @@ -9265,6 +9368,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `SUM`, `COUNT`, `COUNT_DISTINCT`, `AVG`, `MEDIAN`, `MIN`, `MAX`, and `STDDEV`.", "enum": [ "SUM", "COUNT", @@ -9304,6 +9408,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `ACTIVE` and `DELETED`.", "enum": [ "ACTIVE", "DELETED" @@ -9321,7 +9426,7 @@ "type": "object", "properties": { "comparison_operator": { - "description": "Operator used for comparison in alert evaluation.", + "description": "Required. Operator used for comparison in alert evaluation.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator" }, "empty_result_state": { @@ -9333,7 +9438,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification" }, "source": { - "description": "Source column from result to use to evaluate alert", + "description": "Required. Source column from result to use to evaluate alert", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn" }, "threshold": { @@ -9410,6 +9515,7 @@ "$ref": "#/$defs/string" }, "name": { + "description": "Required.", "$ref": "#/$defs/string" } }, @@ -9514,6 +9620,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `CHANNEL_NAME_PREVIEW`, `CHANNEL_NAME_CURRENT`, `CHANNEL_NAME_PREVIOUS`, and `CHANNEL_NAME_CUSTOM`.", "enum": [ "CHANNEL_NAME_PREVIEW", "CHANNEL_NAME_CURRENT", @@ -9531,6 +9638,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `LESS_THAN`, `GREATER_THAN`, `EQUAL`, `NOT_EQUAL`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN_OR_EQUAL`, `IS_NULL`, and `IS_NOT_NULL`.", "enum": [ "LESS_THAN", "GREATER_THAN", @@ -9552,6 +9660,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `TYPE_UNSPECIFIED`, `CLASSIC`, and `PRO`.", "enum": [ "TYPE_UNSPECIFIED", "CLASSIC", @@ -9574,11 +9683,11 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus" }, "quartz_cron_schedule": { - "description": "A cron expression using quartz syntax that specifies the schedule for this pipeline.\nShould use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html", + "description": "Required. A cron expression using quartz syntax that specifies the schedule for this pipeline.\nShould use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html", "$ref": "#/$defs/string" }, "timezone_id": { - "description": "A Java timezone id. The schedule will be resolved using this timezone.\nThis will be combined with the quartz_cron_schedule to determine the schedule.\nSee https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.", + "description": "Required. A Java timezone id. 
The schedule will be resolved using this timezone.\nThis will be combined with the quartz_cron_schedule to determine the schedule.\nSee https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.", "$ref": "#/$defs/string" } }, @@ -9635,6 +9744,7 @@ "oneOf": [ { "type": "string", + "description": "Valid values are: `UNPAUSED` and `PAUSED`.", "enum": [ "UNPAUSED", "PAUSED" @@ -9670,11 +9780,11 @@ "description": "The metadata of the Azure KeyVault for a secret scope of type `AZURE_KEYVAULT`", "properties": { "dns_name": { - "description": "The DNS of the KeyVault", + "description": "Required. The DNS of the KeyVault", "$ref": "#/$defs/string" }, "resource_id": { - "description": "The resource id of the azure KeyVault that user wants to associate the scope with.", + "description": "Required. The resource id of the azure KeyVault that user wants to associate the scope with.", "$ref": "#/$defs/string" } }, @@ -9694,7 +9804,7 @@ "oneOf": [ { "type": "string", - "description": "The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes\nwill be supported in a later release.", + "description": "The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes\nwill be supported in a later release. Valid values are: `DATABRICKS` and `AZURE_KEYVAULT`.", "enum": [ "DATABRICKS", "AZURE_KEYVAULT" diff --git a/python/databricks/bundles/jobs/_models/adlsgen2_info.py b/python/databricks/bundles/jobs/_models/adlsgen2_info.py index 206c8b676e..da58124edb 100644 --- a/python/databricks/bundles/jobs/_models/adlsgen2_info.py +++ b/python/databricks/bundles/jobs/_models/adlsgen2_info.py @@ -17,7 +17,7 @@ class Adlsgen2Info: destination: VariableOr[str] """ - abfss destination, e.g. `abfss://@.dfs.core.windows.net/`. + Required. abfss destination, e.g. `abfss://@.dfs.core.windows.net/`. """ @classmethod @@ -33,7 +33,7 @@ class Adlsgen2InfoDict(TypedDict, total=False): destination: VariableOr[str] """ - abfss destination, e.g. `abfss://@.dfs.core.windows.net/`. + Required. abfss destination, e.g. `abfss://@.dfs.core.windows.net/`. """ diff --git a/python/databricks/bundles/jobs/_models/authentication_method.py b/python/databricks/bundles/jobs/_models/authentication_method.py index 68be0e6218..17f641a186 100644 --- a/python/databricks/bundles/jobs/_models/authentication_method.py +++ b/python/databricks/bundles/jobs/_models/authentication_method.py @@ -3,6 +3,10 @@ class AuthenticationMethod(Enum): + """ + Valid values are: `OAUTH` and `PAT`. + """ + OAUTH = "OAUTH" PAT = "PAT" diff --git a/python/databricks/bundles/jobs/_models/aws_availability.py b/python/databricks/bundles/jobs/_models/aws_availability.py index 5d87ffafba..6c5cd5ac01 100644 --- a/python/databricks/bundles/jobs/_models/aws_availability.py +++ b/python/databricks/bundles/jobs/_models/aws_availability.py @@ -6,7 +6,7 @@ class AwsAvailability(Enum): """ Availability type used for all subsequent nodes past the `first_on_demand` ones. - Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. + Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. Valid values are: `SPOT`, `ON_DEMAND`, and `SPOT_WITH_FALLBACK`. 
""" SPOT = "SPOT" diff --git a/python/databricks/bundles/jobs/_models/azure_availability.py b/python/databricks/bundles/jobs/_models/azure_availability.py index 72d461d5d7..b8f9588e15 100644 --- a/python/databricks/bundles/jobs/_models/azure_availability.py +++ b/python/databricks/bundles/jobs/_models/azure_availability.py @@ -5,7 +5,7 @@ class AzureAvailability(Enum): """ Availability type used for all subsequent nodes past the `first_on_demand` ones. - Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. + Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. Valid values are: `SPOT_AZURE`, `ON_DEMAND_AZURE`, and `SPOT_WITH_FALLBACK_AZURE`. """ SPOT_AZURE = "SPOT_AZURE" diff --git a/python/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py b/python/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py index d0281f144c..88c1cd1f41 100644 --- a/python/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py +++ b/python/databricks/bundles/jobs/_models/clean_rooms_notebook_task.py @@ -22,12 +22,12 @@ class CleanRoomsNotebookTask: clean_room_name: VariableOr[str] """ - The clean room that the notebook belongs to. + Required. The clean room that the notebook belongs to. """ notebook_name: VariableOr[str] """ - Name of the notebook being run. + Required. Name of the notebook being run. """ etag: VariableOrOptional[str] = None @@ -54,12 +54,12 @@ class CleanRoomsNotebookTaskDict(TypedDict, total=False): clean_room_name: VariableOr[str] """ - The clean room that the notebook belongs to. + Required. The clean room that the notebook belongs to. """ notebook_name: VariableOr[str] """ - Name of the notebook being run. + Required. Name of the notebook being run. """ etag: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/compute_config.py b/python/databricks/bundles/jobs/_models/compute_config.py index b1194a80cd..9028623a24 100644 --- a/python/databricks/bundles/jobs/_models/compute_config.py +++ b/python/databricks/bundles/jobs/_models/compute_config.py @@ -17,7 +17,7 @@ class ComputeConfig: num_gpus: VariableOr[int] """ - Number of GPUs. + Required. Number of GPUs. """ gpu_node_pool_id: VariableOrOptional[str] = None @@ -43,7 +43,7 @@ class ComputeConfigDict(TypedDict, total=False): num_gpus: VariableOr[int] """ - Number of GPUs. + Required. Number of GPUs. """ gpu_node_pool_id: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/condition.py b/python/databricks/bundles/jobs/_models/condition.py index d7c1b25bc7..f2e09f4377 100644 --- a/python/databricks/bundles/jobs/_models/condition.py +++ b/python/databricks/bundles/jobs/_models/condition.py @@ -3,6 +3,10 @@ class Condition(Enum): + """ + Valid values are: `ANY_UPDATED` and `ALL_UPDATED`. + """ + ANY_UPDATED = "ANY_UPDATED" ALL_UPDATED = "ALL_UPDATED" diff --git a/python/databricks/bundles/jobs/_models/condition_task.py b/python/databricks/bundles/jobs/_models/condition_task.py index 4934c16b02..3920292f3c 100644 --- a/python/databricks/bundles/jobs/_models/condition_task.py +++ b/python/databricks/bundles/jobs/_models/condition_task.py @@ -19,12 +19,12 @@ class ConditionTask: left: VariableOr[str] """ - The left operand of the condition task. Can be either a string value or a job state or parameter reference. + Required. The left operand of the condition task. Can be either a string value or a job state or parameter reference. 
""" op: VariableOr[ConditionTaskOp] """ - * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`. + Required. * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to `false`. The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison. @@ -32,7 +32,7 @@ class ConditionTask: right: VariableOr[str] """ - The right operand of the condition task. Can be either a string value or a job state or parameter reference. + Required. The right operand of the condition task. Can be either a string value or a job state or parameter reference. """ @classmethod @@ -48,12 +48,12 @@ class ConditionTaskDict(TypedDict, total=False): left: VariableOr[str] """ - The left operand of the condition task. Can be either a string value or a job state or parameter reference. + Required. The left operand of the condition task. Can be either a string value or a job state or parameter reference. """ op: VariableOr[ConditionTaskOpParam] """ - * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`. + Required. * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to `false`. The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison. @@ -61,7 +61,7 @@ class ConditionTaskDict(TypedDict, total=False): right: VariableOr[str] """ - The right operand of the condition task. Can be either a string value or a job state or parameter reference. + Required. The right operand of the condition task. Can be either a string value or a job state or parameter reference. """ diff --git a/python/databricks/bundles/jobs/_models/dbfs_storage_info.py b/python/databricks/bundles/jobs/_models/dbfs_storage_info.py index 81fe319a65..33d9a3bdc4 100644 --- a/python/databricks/bundles/jobs/_models/dbfs_storage_info.py +++ b/python/databricks/bundles/jobs/_models/dbfs_storage_info.py @@ -17,7 +17,7 @@ class DbfsStorageInfo: destination: VariableOr[str] """ - dbfs destination, e.g. `dbfs:/my/path` + Required. dbfs destination, e.g. `dbfs:/my/path` """ @classmethod @@ -33,7 +33,7 @@ class DbfsStorageInfoDict(TypedDict, total=False): destination: VariableOr[str] """ - dbfs destination, e.g. `dbfs:/my/path` + Required. dbfs destination, e.g. 
`dbfs:/my/path` """ diff --git a/python/databricks/bundles/jobs/_models/dbt_task.py b/python/databricks/bundles/jobs/_models/dbt_task.py index 52ae3cfa46..e78daf3904 100644 --- a/python/databricks/bundles/jobs/_models/dbt_task.py +++ b/python/databricks/bundles/jobs/_models/dbt_task.py @@ -21,7 +21,7 @@ class DbtTask: commands: VariableOrList[str] = field(default_factory=list) """ - A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. + Required. A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. """ profiles_directory: VariableOrOptional[str] = None @@ -73,7 +73,7 @@ class DbtTaskDict(TypedDict, total=False): commands: VariableOrList[str] """ - A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. + Required. A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided. """ profiles_directory: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/ebs_volume_type.py b/python/databricks/bundles/jobs/_models/ebs_volume_type.py index b67853f8cb..57818c743e 100644 --- a/python/databricks/bundles/jobs/_models/ebs_volume_type.py +++ b/python/databricks/bundles/jobs/_models/ebs_volume_type.py @@ -5,7 +5,7 @@ class EbsVolumeType(Enum): """ All EBS volume types that Databricks supports. - See https://aws.amazon.com/ebs/details/ for details. + See https://aws.amazon.com/ebs/details/ for details. Valid values are: `GENERAL_PURPOSE_SSD` and `THROUGHPUT_OPTIMIZED_HDD`. """ GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" diff --git a/python/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py b/python/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py index cf4ae748cf..1e286a9ceb 100644 --- a/python/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py +++ b/python/databricks/bundles/jobs/_models/file_arrival_trigger_configuration.py @@ -15,7 +15,7 @@ class FileArrivalTriggerConfiguration: url: VariableOr[str] """ - URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location. + Required. URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location. """ min_time_between_triggers_seconds: VariableOrOptional[int] = None @@ -44,7 +44,7 @@ class FileArrivalTriggerConfigurationDict(TypedDict, total=False): url: VariableOr[str] """ - URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location. + Required. URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location. """ min_time_between_triggers_seconds: VariableOrOptional[int] diff --git a/python/databricks/bundles/jobs/_models/for_each_task.py b/python/databricks/bundles/jobs/_models/for_each_task.py index db353a2885..6377b74da5 100644 --- a/python/databricks/bundles/jobs/_models/for_each_task.py +++ b/python/databricks/bundles/jobs/_models/for_each_task.py @@ -17,13 +17,13 @@ class ForEachTask: inputs: VariableOr[str] """ - Array for task to iterate on. This can be a JSON string or a reference to + Required. Array for task to iterate on. This can be a JSON string or a reference to an array parameter. 
""" task: VariableOr["Task"] """ - Configuration for the task that will be run for each element in the array + Required. Configuration for the task that will be run for each element in the array """ concurrency: VariableOrOptional[int] = None @@ -45,13 +45,13 @@ class ForEachTaskDict(TypedDict, total=False): inputs: VariableOr[str] """ - Array for task to iterate on. This can be a JSON string or a reference to + Required. Array for task to iterate on. This can be a JSON string or a reference to an array parameter. """ task: VariableOr["TaskParam"] """ - Configuration for the task that will be run for each element in the array + Required. Configuration for the task that will be run for each element in the array """ concurrency: VariableOrOptional[int] diff --git a/python/databricks/bundles/jobs/_models/gcp_availability.py b/python/databricks/bundles/jobs/_models/gcp_availability.py index aa8e785a71..89182a3026 100644 --- a/python/databricks/bundles/jobs/_models/gcp_availability.py +++ b/python/databricks/bundles/jobs/_models/gcp_availability.py @@ -5,7 +5,7 @@ class GcpAvailability(Enum): """ This field determines whether the instance pool will contain preemptible - VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable. + VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable. Valid values are: `PREEMPTIBLE_GCP`, `ON_DEMAND_GCP`, and `PREEMPTIBLE_WITH_FALLBACK_GCP`. """ PREEMPTIBLE_GCP = "PREEMPTIBLE_GCP" diff --git a/python/databricks/bundles/jobs/_models/gcs_storage_info.py b/python/databricks/bundles/jobs/_models/gcs_storage_info.py index a5e6d51e6e..e0c38c2a57 100644 --- a/python/databricks/bundles/jobs/_models/gcs_storage_info.py +++ b/python/databricks/bundles/jobs/_models/gcs_storage_info.py @@ -17,7 +17,7 @@ class GcsStorageInfo: destination: VariableOr[str] """ - GCS destination/URI, e.g. `gs://my-bucket/some-prefix` + Required. GCS destination/URI, e.g. `gs://my-bucket/some-prefix` """ @classmethod @@ -33,7 +33,7 @@ class GcsStorageInfoDict(TypedDict, total=False): destination: VariableOr[str] """ - GCS destination/URI, e.g. `gs://my-bucket/some-prefix` + Required. GCS destination/URI, e.g. `gs://my-bucket/some-prefix` """ diff --git a/python/databricks/bundles/jobs/_models/gen_ai_compute_task.py b/python/databricks/bundles/jobs/_models/gen_ai_compute_task.py index 4d2c4b972c..d907675820 100644 --- a/python/databricks/bundles/jobs/_models/gen_ai_compute_task.py +++ b/python/databricks/bundles/jobs/_models/gen_ai_compute_task.py @@ -22,7 +22,7 @@ class GenAiComputeTask: dl_runtime_image: VariableOr[str] """ - Runtime image + Required. Runtime image """ command: VariableOrOptional[str] = None @@ -75,7 +75,7 @@ class GenAiComputeTaskDict(TypedDict, total=False): dl_runtime_image: VariableOr[str] """ - Runtime image + Required. Runtime image """ command: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/git_provider.py b/python/databricks/bundles/jobs/_models/git_provider.py index b3f8a73a37..ef99eea37b 100644 --- a/python/databricks/bundles/jobs/_models/git_provider.py +++ b/python/databricks/bundles/jobs/_models/git_provider.py @@ -3,6 +3,10 @@ class GitProvider(Enum): + """ + Valid values are: `gitHub`, `bitbucketCloud`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLab`, `gitLabEnterpriseEdition`, and `awsCodeCommit`. 
+ """ + GIT_HUB = "gitHub" BITBUCKET_CLOUD = "bitbucketCloud" AZURE_DEV_OPS_SERVICES = "azureDevOpsServices" diff --git a/python/databricks/bundles/jobs/_models/git_source.py b/python/databricks/bundles/jobs/_models/git_source.py index 76fa000f66..fd72abb464 100644 --- a/python/databricks/bundles/jobs/_models/git_source.py +++ b/python/databricks/bundles/jobs/_models/git_source.py @@ -22,12 +22,12 @@ class GitSource: git_provider: VariableOr[GitProvider] """ - Unique identifier of the service used to host the Git repository. The value is case insensitive. + Required. Unique identifier of the service used to host the Git repository. The value is case insensitive. """ git_url: VariableOr[str] """ - URL of the repository to be cloned by this job. + Required. URL of the repository to be cloned by this job. """ git_branch: VariableOrOptional[str] = None @@ -58,12 +58,12 @@ class GitSourceDict(TypedDict, total=False): git_provider: VariableOr[GitProviderParam] """ - Unique identifier of the service used to host the Git repository. The value is case insensitive. + Required. Unique identifier of the service used to host the Git repository. The value is case insensitive. """ git_url: VariableOr[str] """ - URL of the repository to be cloned by this job. + Required. URL of the repository to be cloned by this job. """ git_branch: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/job_cluster.py b/python/databricks/bundles/jobs/_models/job_cluster.py index a23bb51f99..29d464d591 100644 --- a/python/databricks/bundles/jobs/_models/job_cluster.py +++ b/python/databricks/bundles/jobs/_models/job_cluster.py @@ -22,7 +22,7 @@ class JobCluster: new_cluster: VariableOr[ClusterSpec] """ - If new_cluster, a description of a cluster that is created for each task. + Required. If new_cluster, a description of a cluster that is created for each task. """ @classmethod @@ -44,7 +44,7 @@ class JobClusterDict(TypedDict, total=False): new_cluster: VariableOr[ClusterSpecParam] """ - If new_cluster, a description of a cluster that is created for each task. + Required. If new_cluster, a description of a cluster that is created for each task. """ diff --git a/python/databricks/bundles/jobs/_models/job_environment.py b/python/databricks/bundles/jobs/_models/job_environment.py index 798685f4c6..fd8f08873a 100644 --- a/python/databricks/bundles/jobs/_models/job_environment.py +++ b/python/databricks/bundles/jobs/_models/job_environment.py @@ -16,7 +16,7 @@ class JobEnvironment: environment_key: VariableOr[str] """ - The key of an environment. It has to be unique within a job. + Required. The key of an environment. It has to be unique within a job. """ spec: VariableOrOptional[Environment] = None @@ -34,7 +34,7 @@ class JobEnvironmentDict(TypedDict, total=False): environment_key: VariableOr[str] """ - The key of an environment. It has to be unique within a job. + Required. The key of an environment. It has to be unique within a job. """ spec: VariableOrOptional[EnvironmentParam] diff --git a/python/databricks/bundles/jobs/_models/job_parameter_definition.py b/python/databricks/bundles/jobs/_models/job_parameter_definition.py index 8cd8fb9b5f..e8418fb7d1 100644 --- a/python/databricks/bundles/jobs/_models/job_parameter_definition.py +++ b/python/databricks/bundles/jobs/_models/job_parameter_definition.py @@ -15,12 +15,12 @@ class JobParameterDefinition: default: VariableOr[str] """ - Default value of the parameter. + Required. Default value of the parameter. 
""" name: VariableOr[str] """ - The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.` + Required. The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.` """ @classmethod @@ -36,12 +36,12 @@ class JobParameterDefinitionDict(TypedDict, total=False): default: VariableOr[str] """ - Default value of the parameter. + Required. Default value of the parameter. """ name: VariableOr[str] """ - The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.` + Required. The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.` """ diff --git a/python/databricks/bundles/jobs/_models/job_permission.py b/python/databricks/bundles/jobs/_models/job_permission.py index b2ab73cee0..402f1aafc0 100644 --- a/python/databricks/bundles/jobs/_models/job_permission.py +++ b/python/databricks/bundles/jobs/_models/job_permission.py @@ -18,6 +18,9 @@ class JobPermission: """""" level: VariableOr[JobPermissionLevel] + """ + Required. + """ group_name: VariableOrOptional[str] = None @@ -37,6 +40,9 @@ class JobPermissionDict(TypedDict, total=False): """""" level: VariableOr[JobPermissionLevelParam] + """ + Required. + """ group_name: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/job_permission_level.py b/python/databricks/bundles/jobs/_models/job_permission_level.py index fc4e5ebac7..76a11398e1 100644 --- a/python/databricks/bundles/jobs/_models/job_permission_level.py +++ b/python/databricks/bundles/jobs/_models/job_permission_level.py @@ -3,6 +3,10 @@ class JobPermissionLevel(Enum): + """ + Valid values are: `CAN_MANAGE`, `CAN_MANAGE_RUN`, `CAN_VIEW`, and `IS_OWNER`. + """ + CAN_MANAGE = "CAN_MANAGE" CAN_MANAGE_RUN = "CAN_MANAGE_RUN" CAN_VIEW = "CAN_VIEW" diff --git a/python/databricks/bundles/jobs/_models/jobs_health_operator.py b/python/databricks/bundles/jobs/_models/jobs_health_operator.py index 2d949ff07a..3dbdde0637 100644 --- a/python/databricks/bundles/jobs/_models/jobs_health_operator.py +++ b/python/databricks/bundles/jobs/_models/jobs_health_operator.py @@ -4,7 +4,7 @@ class JobsHealthOperator(Enum): """ - Specifies the operator used to compare the health metric value with the specified threshold. + Specifies the operator used to compare the health metric value with the specified threshold. Valid values are: `GREATER_THAN`. """ GREATER_THAN = "GREATER_THAN" diff --git a/python/databricks/bundles/jobs/_models/jobs_health_rule.py b/python/databricks/bundles/jobs/_models/jobs_health_rule.py index 149265c86a..f27c01615e 100644 --- a/python/databricks/bundles/jobs/_models/jobs_health_rule.py +++ b/python/databricks/bundles/jobs/_models/jobs_health_rule.py @@ -22,12 +22,18 @@ class JobsHealthRule: """""" metric: VariableOr[JobsHealthMetric] + """ + Required. + """ op: VariableOr[JobsHealthOperator] + """ + Required. + """ value: VariableOr[int] """ - Specifies the threshold value that the health metric should obey to satisfy the health rule. + Required. Specifies the threshold value that the health metric should obey to satisfy the health rule. """ @classmethod @@ -42,12 +48,18 @@ class JobsHealthRuleDict(TypedDict, total=False): """""" metric: VariableOr[JobsHealthMetricParam] + """ + Required. + """ op: VariableOr[JobsHealthOperatorParam] + """ + Required. + """ value: VariableOr[int] """ - Specifies the threshold value that the health metric should obey to satisfy the health rule. + Required. 
Specifies the threshold value that the health metric should obey to satisfy the health rule. """ diff --git a/python/databricks/bundles/jobs/_models/kind.py b/python/databricks/bundles/jobs/_models/kind.py index 33a487a90e..6752b26568 100644 --- a/python/databricks/bundles/jobs/_models/kind.py +++ b/python/databricks/bundles/jobs/_models/kind.py @@ -3,6 +3,10 @@ class Kind(Enum): + """ + Valid values are: `CLASSIC_PREVIEW`. + """ + CLASSIC_PREVIEW = "CLASSIC_PREVIEW" diff --git a/python/databricks/bundles/jobs/_models/local_file_info.py b/python/databricks/bundles/jobs/_models/local_file_info.py index 70d6f25820..294fd95641 100644 --- a/python/databricks/bundles/jobs/_models/local_file_info.py +++ b/python/databricks/bundles/jobs/_models/local_file_info.py @@ -15,7 +15,7 @@ class LocalFileInfo: destination: VariableOr[str] """ - local file destination, e.g. `file:/my/local/file.sh` + Required. local file destination, e.g. `file:/my/local/file.sh` """ @classmethod @@ -31,7 +31,7 @@ class LocalFileInfoDict(TypedDict, total=False): destination: VariableOr[str] """ - local file destination, e.g. `file:/my/local/file.sh` + Required. local file destination, e.g. `file:/my/local/file.sh` """ diff --git a/python/databricks/bundles/jobs/_models/maven_library.py b/python/databricks/bundles/jobs/_models/maven_library.py index 45925700b8..d4fb6d131a 100644 --- a/python/databricks/bundles/jobs/_models/maven_library.py +++ b/python/databricks/bundles/jobs/_models/maven_library.py @@ -19,7 +19,7 @@ class MavenLibrary: coordinates: VariableOr[str] """ - Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2". + Required. Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2". """ exclusions: VariableOrList[str] = field(default_factory=list) @@ -49,7 +49,7 @@ class MavenLibraryDict(TypedDict, total=False): coordinates: VariableOr[str] """ - Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2". + Required. Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2". """ exclusions: VariableOrList[str] diff --git a/python/databricks/bundles/jobs/_models/model_trigger_configuration.py b/python/databricks/bundles/jobs/_models/model_trigger_configuration.py index 140e7e09ec..902a6f40a6 100644 --- a/python/databricks/bundles/jobs/_models/model_trigger_configuration.py +++ b/python/databricks/bundles/jobs/_models/model_trigger_configuration.py @@ -25,7 +25,7 @@ class ModelTriggerConfiguration: condition: VariableOr[ModelTriggerConfigurationCondition] """ - The condition based on which to trigger a job run. + Required. The condition based on which to trigger a job run. """ aliases: VariableOrList[str] = field(default_factory=list) @@ -65,7 +65,7 @@ class ModelTriggerConfigurationDict(TypedDict, total=False): condition: VariableOr[ModelTriggerConfigurationConditionParam] """ - The condition based on which to trigger a job run. + Required. The condition based on which to trigger a job run. 
""" aliases: VariableOrList[str] diff --git a/python/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py b/python/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py index 60d942a2c5..44cf9351c1 100644 --- a/python/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py +++ b/python/databricks/bundles/jobs/_models/model_trigger_configuration_condition.py @@ -5,6 +5,8 @@ class ModelTriggerConfigurationCondition(Enum): """ :meta private: [EXPERIMENTAL] + + Valid values are: `MODEL_CREATED`, `MODEL_VERSION_READY`, and `MODEL_ALIAS_SET`. """ MODEL_CREATED = "MODEL_CREATED" diff --git a/python/databricks/bundles/jobs/_models/pause_status.py b/python/databricks/bundles/jobs/_models/pause_status.py index ee701fa10c..a0e2f0474b 100644 --- a/python/databricks/bundles/jobs/_models/pause_status.py +++ b/python/databricks/bundles/jobs/_models/pause_status.py @@ -3,6 +3,10 @@ class PauseStatus(Enum): + """ + Valid values are: `UNPAUSED` and `PAUSED`. + """ + UNPAUSED = "UNPAUSED" PAUSED = "PAUSED" diff --git a/python/databricks/bundles/jobs/_models/performance_target.py b/python/databricks/bundles/jobs/_models/performance_target.py index 5e7b83ae61..0e5fa7bc6d 100644 --- a/python/databricks/bundles/jobs/_models/performance_target.py +++ b/python/databricks/bundles/jobs/_models/performance_target.py @@ -6,7 +6,7 @@ class PerformanceTarget(Enum): """ PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be. The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager - (see cluster-common PerformanceTarget). + (see cluster-common PerformanceTarget). Valid values are: `PERFORMANCE_OPTIMIZED` and `STANDARD`. """ PERFORMANCE_OPTIMIZED = "PERFORMANCE_OPTIMIZED" diff --git a/python/databricks/bundles/jobs/_models/periodic_trigger_configuration.py b/python/databricks/bundles/jobs/_models/periodic_trigger_configuration.py index 5ff5479a88..9bf29c53ba 100644 --- a/python/databricks/bundles/jobs/_models/periodic_trigger_configuration.py +++ b/python/databricks/bundles/jobs/_models/periodic_trigger_configuration.py @@ -19,12 +19,12 @@ class PeriodicTriggerConfiguration: interval: VariableOr[int] """ - The interval at which the trigger should run. + Required. The interval at which the trigger should run. """ unit: VariableOr[PeriodicTriggerConfigurationTimeUnit] """ - The unit of time for the interval. + Required. The unit of time for the interval. """ @classmethod @@ -40,12 +40,12 @@ class PeriodicTriggerConfigurationDict(TypedDict, total=False): interval: VariableOr[int] """ - The interval at which the trigger should run. + Required. The interval at which the trigger should run. """ unit: VariableOr[PeriodicTriggerConfigurationTimeUnitParam] """ - The unit of time for the interval. + Required. The unit of time for the interval. """ diff --git a/python/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py b/python/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py index a24d023a78..90ee04bbba 100644 --- a/python/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py +++ b/python/databricks/bundles/jobs/_models/periodic_trigger_configuration_time_unit.py @@ -3,6 +3,10 @@ class PeriodicTriggerConfigurationTimeUnit(Enum): + """ + Valid values are: `HOURS`, `DAYS`, and `WEEKS`. 
+ """ + HOURS = "HOURS" DAYS = "DAYS" WEEKS = "WEEKS" diff --git a/python/databricks/bundles/jobs/_models/pipeline_task.py b/python/databricks/bundles/jobs/_models/pipeline_task.py index 81126a8ed4..f08db53492 100644 --- a/python/databricks/bundles/jobs/_models/pipeline_task.py +++ b/python/databricks/bundles/jobs/_models/pipeline_task.py @@ -15,7 +15,7 @@ class PipelineTask: pipeline_id: VariableOr[str] """ - The full name of the pipeline task to execute. + Required. The full name of the pipeline task to execute. """ full_refresh: VariableOrOptional[bool] = None @@ -36,7 +36,7 @@ class PipelineTaskDict(TypedDict, total=False): pipeline_id: VariableOr[str] """ - The full name of the pipeline task to execute. + Required. The full name of the pipeline task to execute. """ full_refresh: VariableOrOptional[bool] diff --git a/python/databricks/bundles/jobs/_models/python_py_pi_library.py b/python/databricks/bundles/jobs/_models/python_py_pi_library.py index 58db37caf5..d9b0a030fa 100644 --- a/python/databricks/bundles/jobs/_models/python_py_pi_library.py +++ b/python/databricks/bundles/jobs/_models/python_py_pi_library.py @@ -15,7 +15,7 @@ class PythonPyPiLibrary: package: VariableOr[str] """ - The name of the pypi package to install. An optional exact version specification is also + Required. The name of the pypi package to install. An optional exact version specification is also supported. Examples: "simplejson" and "simplejson==3.8.0". """ @@ -38,7 +38,7 @@ class PythonPyPiLibraryDict(TypedDict, total=False): package: VariableOr[str] """ - The name of the pypi package to install. An optional exact version specification is also + Required. The name of the pypi package to install. An optional exact version specification is also supported. Examples: "simplejson" and "simplejson==3.8.0". """ diff --git a/python/databricks/bundles/jobs/_models/python_wheel_task.py b/python/databricks/bundles/jobs/_models/python_wheel_task.py index 07a230d8f5..bbc12e2949 100644 --- a/python/databricks/bundles/jobs/_models/python_wheel_task.py +++ b/python/databricks/bundles/jobs/_models/python_wheel_task.py @@ -15,12 +15,12 @@ class PythonWheelTask: entry_point: VariableOr[str] """ - Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()` + Required. Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()` """ package_name: VariableOr[str] """ - Name of the package to execute + Required. Name of the package to execute """ named_parameters: VariableOrDict[str] = field(default_factory=dict) @@ -46,12 +46,12 @@ class PythonWheelTaskDict(TypedDict, total=False): entry_point: VariableOr[str] """ - Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()` + Required. Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()` """ package_name: VariableOr[str] """ - Name of the package to execute + Required. 
Name of the package to execute """ named_parameters: VariableOrDict[str] diff --git a/python/databricks/bundles/jobs/_models/r_cran_library.py b/python/databricks/bundles/jobs/_models/r_cran_library.py index 0770f2265a..d3ec27992f 100644 --- a/python/databricks/bundles/jobs/_models/r_cran_library.py +++ b/python/databricks/bundles/jobs/_models/r_cran_library.py @@ -15,7 +15,7 @@ class RCranLibrary: package: VariableOr[str] """ - The name of the CRAN package to install. + Required. The name of the CRAN package to install. """ repo: VariableOrOptional[str] = None @@ -36,7 +36,7 @@ class RCranLibraryDict(TypedDict, total=False): package: VariableOr[str] """ - The name of the CRAN package to install. + Required. The name of the CRAN package to install. """ repo: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/run_job_task.py b/python/databricks/bundles/jobs/_models/run_job_task.py index c7b88ee5ea..1da2138f0c 100644 --- a/python/databricks/bundles/jobs/_models/run_job_task.py +++ b/python/databricks/bundles/jobs/_models/run_job_task.py @@ -23,7 +23,7 @@ class RunJobTask: job_id: VariableOr[int] """ - ID of the job to trigger. + Required. ID of the job to trigger. """ job_parameters: VariableOrDict[str] = field(default_factory=dict) @@ -49,7 +49,7 @@ class RunJobTaskDict(TypedDict, total=False): job_id: VariableOr[int] """ - ID of the job to trigger. + Required. ID of the job to trigger. """ job_parameters: VariableOrDict[str] diff --git a/python/databricks/bundles/jobs/_models/runtime_engine.py b/python/databricks/bundles/jobs/_models/runtime_engine.py index 1829c507e7..06048ef706 100644 --- a/python/databricks/bundles/jobs/_models/runtime_engine.py +++ b/python/databricks/bundles/jobs/_models/runtime_engine.py @@ -3,6 +3,10 @@ class RuntimeEngine(Enum): + """ + Valid values are: `NULL`, `STANDARD`, and `PHOTON`. + """ + NULL = "NULL" STANDARD = "STANDARD" PHOTON = "PHOTON" diff --git a/python/databricks/bundles/jobs/_models/s3_storage_info.py b/python/databricks/bundles/jobs/_models/s3_storage_info.py index b5e09063e5..a6d52a1e79 100644 --- a/python/databricks/bundles/jobs/_models/s3_storage_info.py +++ b/python/databricks/bundles/jobs/_models/s3_storage_info.py @@ -17,7 +17,7 @@ class S3StorageInfo: destination: VariableOr[str] """ - S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using + Required. S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using cluster iam role, please make sure you set cluster iam role and the role has write access to the destination. Please also note that you cannot use AWS keys to deliver logs. """ @@ -74,7 +74,7 @@ class S3StorageInfoDict(TypedDict, total=False): destination: VariableOr[str] """ - S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using + Required. S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using cluster iam role, please make sure you set cluster iam role and the role has write access to the destination. Please also note that you cannot use AWS keys to deliver logs. """ diff --git a/python/databricks/bundles/jobs/_models/sql_task.py b/python/databricks/bundles/jobs/_models/sql_task.py index f404a5b7a9..9cb3187a40 100644 --- a/python/databricks/bundles/jobs/_models/sql_task.py +++ b/python/databricks/bundles/jobs/_models/sql_task.py @@ -32,7 +32,7 @@ class SqlTask: warehouse_id: VariableOr[str] """ - The canonical identifier of the SQL warehouse. 
Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs. + Required. The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs. """ alert: VariableOrOptional[SqlTaskAlert] = None @@ -73,7 +73,7 @@ class SqlTaskDict(TypedDict, total=False): warehouse_id: VariableOr[str] """ - The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs. + Required. The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs. """ alert: VariableOrOptional[SqlTaskAlertParam] diff --git a/python/databricks/bundles/jobs/_models/sql_task_alert.py b/python/databricks/bundles/jobs/_models/sql_task_alert.py index 513eecd710..33ebba5671 100644 --- a/python/databricks/bundles/jobs/_models/sql_task_alert.py +++ b/python/databricks/bundles/jobs/_models/sql_task_alert.py @@ -23,7 +23,7 @@ class SqlTaskAlert: alert_id: VariableOr[str] """ - The canonical identifier of the SQL alert. + Required. The canonical identifier of the SQL alert. """ pause_subscriptions: VariableOrOptional[bool] = None @@ -49,7 +49,7 @@ class SqlTaskAlertDict(TypedDict, total=False): alert_id: VariableOr[str] """ - The canonical identifier of the SQL alert. + Required. The canonical identifier of the SQL alert. """ pause_subscriptions: VariableOrOptional[bool] diff --git a/python/databricks/bundles/jobs/_models/sql_task_dashboard.py b/python/databricks/bundles/jobs/_models/sql_task_dashboard.py index bc0a9b831f..4c55852bc7 100644 --- a/python/databricks/bundles/jobs/_models/sql_task_dashboard.py +++ b/python/databricks/bundles/jobs/_models/sql_task_dashboard.py @@ -23,7 +23,7 @@ class SqlTaskDashboard: dashboard_id: VariableOr[str] """ - The canonical identifier of the SQL dashboard. + Required. The canonical identifier of the SQL dashboard. """ custom_subject: VariableOrOptional[str] = None @@ -54,7 +54,7 @@ class SqlTaskDashboardDict(TypedDict, total=False): dashboard_id: VariableOr[str] """ - The canonical identifier of the SQL dashboard. + Required. The canonical identifier of the SQL dashboard. """ custom_subject: VariableOrOptional[str] diff --git a/python/databricks/bundles/jobs/_models/sql_task_file.py b/python/databricks/bundles/jobs/_models/sql_task_file.py index a0c02a2ca1..f455ad2599 100644 --- a/python/databricks/bundles/jobs/_models/sql_task_file.py +++ b/python/databricks/bundles/jobs/_models/sql_task_file.py @@ -16,7 +16,7 @@ class SqlTaskFile: path: VariableOr[str] """ - Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths. + Required. Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths. """ source: VariableOrOptional[Source] = None @@ -42,7 +42,7 @@ class SqlTaskFileDict(TypedDict, total=False): path: VariableOr[str] """ - Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths. + Required. Path of the SQL file. 
     Must be relative if the source is a remote Git repository and absolute for workspace paths.
     """

     source: VariableOrOptional[SourceParam]
diff --git a/python/databricks/bundles/jobs/_models/sql_task_query.py b/python/databricks/bundles/jobs/_models/sql_task_query.py
index c367bff828..c283534659 100644
--- a/python/databricks/bundles/jobs/_models/sql_task_query.py
+++ b/python/databricks/bundles/jobs/_models/sql_task_query.py
@@ -15,7 +15,7 @@ class SqlTaskQuery:
     query_id: VariableOr[str]
     """
-    The canonical identifier of the SQL query.
+    Required. The canonical identifier of the SQL query.
     """

     @classmethod
@@ -31,7 +31,7 @@ class SqlTaskQueryDict(TypedDict, total=False):
     query_id: VariableOr[str]
     """
-    The canonical identifier of the SQL query.
+    Required. The canonical identifier of the SQL query.
     """
diff --git a/python/databricks/bundles/jobs/_models/storage_mode.py b/python/databricks/bundles/jobs/_models/storage_mode.py
index 764e2e7d54..f7464474fd 100644
--- a/python/databricks/bundles/jobs/_models/storage_mode.py
+++ b/python/databricks/bundles/jobs/_models/storage_mode.py
@@ -3,6 +3,10 @@
 class StorageMode(Enum):
+    """
+    Valid values are: `DIRECT_QUERY`, `IMPORT`, and `DUAL`.
+    """
+
     DIRECT_QUERY = "DIRECT_QUERY"
     IMPORT = "IMPORT"
     DUAL = "DUAL"
diff --git a/python/databricks/bundles/jobs/_models/table_update_trigger_configuration.py b/python/databricks/bundles/jobs/_models/table_update_trigger_configuration.py
index c824a72499..4c5ea26532 100644
--- a/python/databricks/bundles/jobs/_models/table_update_trigger_configuration.py
+++ b/python/databricks/bundles/jobs/_models/table_update_trigger_configuration.py
@@ -27,7 +27,7 @@ class TableUpdateTriggerConfiguration:
     table_names: VariableOrList[str] = field(default_factory=list)
     """
-    A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.
+    Required. A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.
     """

     wait_after_last_change_seconds: VariableOrOptional[int] = None
@@ -61,7 +61,7 @@ class TableUpdateTriggerConfigurationDict(TypedDict, total=False):
     table_names: VariableOrList[str]
     """
-    A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.
+    Required. A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.
     """

     wait_after_last_change_seconds: VariableOrOptional[int]
diff --git a/python/databricks/bundles/jobs/_models/task_dependency.py b/python/databricks/bundles/jobs/_models/task_dependency.py
index 5f8fd3e607..5eabc0ba64 100644
--- a/python/databricks/bundles/jobs/_models/task_dependency.py
+++ b/python/databricks/bundles/jobs/_models/task_dependency.py
@@ -15,7 +15,7 @@ class TaskDependency:
     task_key: VariableOr[str]
     """
-    The name of the task this task depends on.
+    Required. The name of the task this task depends on.
     """

     outcome: VariableOrOptional[str] = None
@@ -36,7 +36,7 @@ class TaskDependencyDict(TypedDict, total=False):
     task_key: VariableOr[str]
     """
-    The name of the task this task depends on.
+    Required. The name of the task this task depends on.
     """

     outcome: VariableOrOptional[str]
diff --git a/python/databricks/bundles/jobs/_models/volumes_storage_info.py b/python/databricks/bundles/jobs/_models/volumes_storage_info.py
index cbe74758fc..22fb4e3b5a 100644
--- a/python/databricks/bundles/jobs/_models/volumes_storage_info.py
+++ b/python/databricks/bundles/jobs/_models/volumes_storage_info.py
@@ -17,7 +17,7 @@ class VolumesStorageInfo:
     destination: VariableOr[str]
     """
-    UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
+    Required. UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     or `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     """
@@ -34,7 +34,7 @@ class VolumesStorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
+    Required. UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     or `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     """
diff --git a/python/databricks/bundles/jobs/_models/webhook.py b/python/databricks/bundles/jobs/_models/webhook.py
index 5e47d57f83..5b4b3692d4 100644
--- a/python/databricks/bundles/jobs/_models/webhook.py
+++ b/python/databricks/bundles/jobs/_models/webhook.py
@@ -14,6 +14,9 @@ class Webhook:
     """"""

     id: VariableOr[str]
+    """
+    Required.
+    """

     @classmethod
     def from_dict(cls, value: "WebhookDict") -> "Self":
         return _transform(cls, value)
@@ -27,6 +30,9 @@ class WebhookDict(TypedDict, total=False):
     """"""

     id: VariableOr[str]
+    """
+    Required.
+    """


 WebhookParam = WebhookDict | Webhook
diff --git a/python/databricks/bundles/jobs/_models/workload_type.py b/python/databricks/bundles/jobs/_models/workload_type.py
index db40f98df0..491f7db9ab 100644
--- a/python/databricks/bundles/jobs/_models/workload_type.py
+++ b/python/databricks/bundles/jobs/_models/workload_type.py
@@ -21,7 +21,7 @@ class WorkloadType:
     clients: VariableOr[ClientsTypes]
     """
-    defined what type of clients can use the cluster. E.g. Notebooks, Jobs
+    Required. defined what type of clients can use the cluster. E.g. Notebooks, Jobs
     """

     @classmethod
@@ -37,7 +37,7 @@ class WorkloadTypeDict(TypedDict, total=False):
     clients: VariableOr[ClientsTypesParam]
     """
-    defined what type of clients can use the cluster. E.g. Notebooks, Jobs
+    Required. defined what type of clients can use the cluster. E.g. Notebooks, Jobs
     """
diff --git a/python/databricks/bundles/jobs/_models/workspace_storage_info.py b/python/databricks/bundles/jobs/_models/workspace_storage_info.py
index 29c075ac2f..7b813666a3 100644
--- a/python/databricks/bundles/jobs/_models/workspace_storage_info.py
+++ b/python/databricks/bundles/jobs/_models/workspace_storage_info.py
@@ -17,7 +17,7 @@ class WorkspaceStorageInfo:
     destination: VariableOr[str]
     """
-    wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
+    Required. wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
     """

     @classmethod
@@ -33,7 +33,7 @@ class WorkspaceStorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
+    Required. wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
     """
diff --git a/python/databricks/bundles/pipelines/_models/adlsgen2_info.py b/python/databricks/bundles/pipelines/_models/adlsgen2_info.py
index 206c8b676e..da58124edb 100644
--- a/python/databricks/bundles/pipelines/_models/adlsgen2_info.py
+++ b/python/databricks/bundles/pipelines/_models/adlsgen2_info.py
@@ -17,7 +17,7 @@ class Adlsgen2Info:
     destination: VariableOr[str]
     """
-    abfss destination, e.g. `abfss://@.dfs.core.windows.net/`.
+    Required. abfss destination, e.g. `abfss://@.dfs.core.windows.net/`.
     """

     @classmethod
@@ -33,7 +33,7 @@ class Adlsgen2InfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    abfss destination, e.g. `abfss://@.dfs.core.windows.net/`.
+    Required. abfss destination, e.g. `abfss://@.dfs.core.windows.net/`.
     """
diff --git a/python/databricks/bundles/pipelines/_models/aws_availability.py b/python/databricks/bundles/pipelines/_models/aws_availability.py
index 5d87ffafba..6c5cd5ac01 100644
--- a/python/databricks/bundles/pipelines/_models/aws_availability.py
+++ b/python/databricks/bundles/pipelines/_models/aws_availability.py
@@ -6,7 +6,7 @@ class AwsAvailability(Enum):
     """
     Availability type used for all subsequent nodes past the `first_on_demand` ones.

-    Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.
+    Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. Valid values are: `SPOT`, `ON_DEMAND`, and `SPOT_WITH_FALLBACK`.
     """

     SPOT = "SPOT"
diff --git a/python/databricks/bundles/pipelines/_models/azure_availability.py b/python/databricks/bundles/pipelines/_models/azure_availability.py
index 72d461d5d7..b8f9588e15 100644
--- a/python/databricks/bundles/pipelines/_models/azure_availability.py
+++ b/python/databricks/bundles/pipelines/_models/azure_availability.py
@@ -5,7 +5,7 @@ class AzureAvailability(Enum):
     """
     Availability type used for all subsequent nodes past the `first_on_demand` ones.

-    Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.
+    Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster. Valid values are: `SPOT_AZURE`, `ON_DEMAND_AZURE`, and `SPOT_WITH_FALLBACK_AZURE`.
     """

     SPOT_AZURE = "SPOT_AZURE"
diff --git a/python/databricks/bundles/pipelines/_models/day_of_week.py b/python/databricks/bundles/pipelines/_models/day_of_week.py
index a685c2b308..b3c3884ae0 100644
--- a/python/databricks/bundles/pipelines/_models/day_of_week.py
+++ b/python/databricks/bundles/pipelines/_models/day_of_week.py
@@ -7,7 +7,7 @@ class DayOfWeek(Enum):
     """
     :meta private: [EXPERIMENTAL]

     Days of week in which the window is allowed to happen.
-    If not specified all days of the week will be used.
+    If not specified all days of the week will be used. Valid values are: `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
     """

     MONDAY = "MONDAY"
diff --git a/python/databricks/bundles/pipelines/_models/dbfs_storage_info.py b/python/databricks/bundles/pipelines/_models/dbfs_storage_info.py
index 81fe319a65..33d9a3bdc4 100644
--- a/python/databricks/bundles/pipelines/_models/dbfs_storage_info.py
+++ b/python/databricks/bundles/pipelines/_models/dbfs_storage_info.py
@@ -17,7 +17,7 @@ class DbfsStorageInfo:
     destination: VariableOr[str]
     """
-    dbfs destination, e.g. `dbfs:/my/path`
+    Required. dbfs destination, e.g. `dbfs:/my/path`
     """

     @classmethod
@@ -33,7 +33,7 @@ class DbfsStorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    dbfs destination, e.g. `dbfs:/my/path`
+    Required. dbfs destination, e.g. `dbfs:/my/path`
     """
diff --git a/python/databricks/bundles/pipelines/_models/ebs_volume_type.py b/python/databricks/bundles/pipelines/_models/ebs_volume_type.py
index b67853f8cb..57818c743e 100644
--- a/python/databricks/bundles/pipelines/_models/ebs_volume_type.py
+++ b/python/databricks/bundles/pipelines/_models/ebs_volume_type.py
@@ -5,7 +5,7 @@ class EbsVolumeType(Enum):
     """
     All EBS volume types that Databricks supports.

-    See https://aws.amazon.com/ebs/details/ for details.
+    See https://aws.amazon.com/ebs/details/ for details. Valid values are: `GENERAL_PURPOSE_SSD` and `THROUGHPUT_OPTIMIZED_HDD`.
     """

     GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD"
diff --git a/python/databricks/bundles/pipelines/_models/gcp_availability.py b/python/databricks/bundles/pipelines/_models/gcp_availability.py
index aa8e785a71..89182a3026 100644
--- a/python/databricks/bundles/pipelines/_models/gcp_availability.py
+++ b/python/databricks/bundles/pipelines/_models/gcp_availability.py
@@ -5,7 +5,7 @@ class GcpAvailability(Enum):
     """
     This field determines whether the instance pool will contain preemptible
-    VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.
+    VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable. Valid values are: `PREEMPTIBLE_GCP`, `ON_DEMAND_GCP`, and `PREEMPTIBLE_WITH_FALLBACK_GCP`.
     """

     PREEMPTIBLE_GCP = "PREEMPTIBLE_GCP"
diff --git a/python/databricks/bundles/pipelines/_models/gcs_storage_info.py b/python/databricks/bundles/pipelines/_models/gcs_storage_info.py
index a5e6d51e6e..e0c38c2a57 100644
--- a/python/databricks/bundles/pipelines/_models/gcs_storage_info.py
+++ b/python/databricks/bundles/pipelines/_models/gcs_storage_info.py
@@ -17,7 +17,7 @@ class GcsStorageInfo:
     destination: VariableOr[str]
     """
-    GCS destination/URI, e.g. `gs://my-bucket/some-prefix`
+    Required. GCS destination/URI, e.g. `gs://my-bucket/some-prefix`
     """

     @classmethod
@@ -33,7 +33,7 @@ class GcsStorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    GCS destination/URI, e.g. `gs://my-bucket/some-prefix`
+    Required. GCS destination/URI, e.g. `gs://my-bucket/some-prefix`
     """
diff --git a/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py b/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
index abe37602e5..99127c07f6 100644
--- a/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
+++ b/python/databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
@@ -21,7 +21,7 @@ class IngestionGatewayPipelineDefinition:
     connection_name: VariableOr[str]
     """
-    Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    Required. Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
     """

     gateway_storage_catalog: VariableOr[str]
@@ -66,7 +66,7 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
     connection_name: VariableOr[str]
     """
-    Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    Required. Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
     """

     gateway_storage_catalog: VariableOr[str]
diff --git a/python/databricks/bundles/pipelines/_models/local_file_info.py b/python/databricks/bundles/pipelines/_models/local_file_info.py
index 70d6f25820..294fd95641 100644
--- a/python/databricks/bundles/pipelines/_models/local_file_info.py
+++ b/python/databricks/bundles/pipelines/_models/local_file_info.py
@@ -15,7 +15,7 @@ class LocalFileInfo:
     destination: VariableOr[str]
     """
-    local file destination, e.g. `file:/my/local/file.sh`
+    Required. local file destination, e.g. `file:/my/local/file.sh`
     """

     @classmethod
@@ -31,7 +31,7 @@ class LocalFileInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    local file destination, e.g. `file:/my/local/file.sh`
+    Required. local file destination, e.g. `file:/my/local/file.sh`
     """
diff --git a/python/databricks/bundles/pipelines/_models/maven_library.py b/python/databricks/bundles/pipelines/_models/maven_library.py
index 45925700b8..d4fb6d131a 100644
--- a/python/databricks/bundles/pipelines/_models/maven_library.py
+++ b/python/databricks/bundles/pipelines/_models/maven_library.py
@@ -19,7 +19,7 @@ class MavenLibrary:
     coordinates: VariableOr[str]
     """
-    Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2".
+    Required. Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2".
     """

     exclusions: VariableOrList[str] = field(default_factory=list)
@@ -49,7 +49,7 @@ class MavenLibraryDict(TypedDict, total=False):
     coordinates: VariableOr[str]
     """
-    Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2".
+    Required. Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2".
     """

     exclusions: VariableOrList[str]
diff --git a/python/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py b/python/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py
index 58afdbefab..f57e4acb44 100644
--- a/python/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py
+++ b/python/databricks/bundles/pipelines/_models/pipeline_cluster_autoscale.py
@@ -19,12 +19,12 @@ class PipelineClusterAutoscale:
     max_workers: VariableOr[int]
     """
-    The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.
+    Required. The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.
     """

     min_workers: VariableOr[int]
     """
-    The minimum number of workers the cluster can scale down to when underutilized.
+    Required. The minimum number of workers the cluster can scale down to when underutilized.
     It is also the initial number of workers the cluster will have after creation.
     """
@@ -50,12 +50,12 @@ class PipelineClusterAutoscaleDict(TypedDict, total=False):
     max_workers: VariableOr[int]
     """
-    The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.
+    Required. The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.
     """

     min_workers: VariableOr[int]
     """
-    The minimum number of workers the cluster can scale down to when underutilized.
+    Required. The minimum number of workers the cluster can scale down to when underutilized.
     It is also the initial number of workers the cluster will have after creation.
     """
diff --git a/python/databricks/bundles/pipelines/_models/pipeline_permission.py b/python/databricks/bundles/pipelines/_models/pipeline_permission.py
index 0a3ed95538..e9b8bd8854 100644
--- a/python/databricks/bundles/pipelines/_models/pipeline_permission.py
+++ b/python/databricks/bundles/pipelines/_models/pipeline_permission.py
@@ -18,6 +18,9 @@ class PipelinePermission:
     """"""

     level: VariableOr[PipelinePermissionLevel]
+    """
+    Required.
+    """

     group_name: VariableOrOptional[str] = None
@@ -37,6 +40,9 @@ class PipelinePermissionDict(TypedDict, total=False):
     """"""

     level: VariableOr[PipelinePermissionLevelParam]
+    """
+    Required.
+    """

     group_name: VariableOrOptional[str]
diff --git a/python/databricks/bundles/pipelines/_models/pipeline_permission_level.py b/python/databricks/bundles/pipelines/_models/pipeline_permission_level.py
index 3e6edf3080..67f9e881d0 100644
--- a/python/databricks/bundles/pipelines/_models/pipeline_permission_level.py
+++ b/python/databricks/bundles/pipelines/_models/pipeline_permission_level.py
@@ -3,6 +3,10 @@
 class PipelinePermissionLevel(Enum):
+    """
+    Valid values are: `CAN_MANAGE`, `IS_OWNER`, `CAN_RUN`, and `CAN_VIEW`.
+    """
+
     CAN_MANAGE = "CAN_MANAGE"
     IS_OWNER = "IS_OWNER"
     CAN_RUN = "CAN_RUN"
diff --git a/python/databricks/bundles/pipelines/_models/restart_window.py b/python/databricks/bundles/pipelines/_models/restart_window.py
index 2385a32c7a..f2f679c94b 100644
--- a/python/databricks/bundles/pipelines/_models/restart_window.py
+++ b/python/databricks/bundles/pipelines/_models/restart_window.py
@@ -22,7 +22,7 @@ class RestartWindow:
     start_hour: VariableOr[int]
     """
-    An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
+    Required. An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
     Continuous pipeline restart is triggered only within a five-hour window starting at this hour.
     """
@@ -51,7 +51,7 @@ class RestartWindowDict(TypedDict, total=False):
     start_hour: VariableOr[int]
     """
-    An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
+    Required. An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
     Continuous pipeline restart is triggered only within a five-hour window starting at this hour.
     """
diff --git a/python/databricks/bundles/pipelines/_models/s3_storage_info.py b/python/databricks/bundles/pipelines/_models/s3_storage_info.py
index b5e09063e5..a6d52a1e79 100644
--- a/python/databricks/bundles/pipelines/_models/s3_storage_info.py
+++ b/python/databricks/bundles/pipelines/_models/s3_storage_info.py
@@ -17,7 +17,7 @@ class S3StorageInfo:
     destination: VariableOr[str]
     """
-    S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using
+    Required. S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using
     cluster iam role, please make sure you set cluster iam role and the role has write access to the
     destination. Please also note that you cannot use AWS keys to deliver logs.
     """
@@ -74,7 +74,7 @@ class S3StorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using
+    Required. S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using
     cluster iam role, please make sure you set cluster iam role and the role has write access to the
     destination. Please also note that you cannot use AWS keys to deliver logs.
     """
diff --git a/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py b/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py
index 6d138b7808..095d744c9e 100644
--- a/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py
+++ b/python/databricks/bundles/pipelines/_models/table_specific_config_scd_type.py
@@ -6,7 +6,7 @@ class TableSpecificConfigScdType(Enum):
     """
     :meta private: [EXPERIMENTAL]

-    The SCD type to use to ingest the table.
+    The SCD type to use to ingest the table. Valid values are: `SCD_TYPE_1`, `SCD_TYPE_2`, and `APPEND_ONLY`.
     """

     SCD_TYPE_1 = "SCD_TYPE_1"
diff --git a/python/databricks/bundles/pipelines/_models/volumes_storage_info.py b/python/databricks/bundles/pipelines/_models/volumes_storage_info.py
index cbe74758fc..22fb4e3b5a 100644
--- a/python/databricks/bundles/pipelines/_models/volumes_storage_info.py
+++ b/python/databricks/bundles/pipelines/_models/volumes_storage_info.py
@@ -17,7 +17,7 @@ class VolumesStorageInfo:
     destination: VariableOr[str]
     """
-    UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
+    Required. UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     or `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     """
@@ -34,7 +34,7 @@ class VolumesStorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
+    Required. UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     or `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`
     """
diff --git a/python/databricks/bundles/pipelines/_models/workspace_storage_info.py b/python/databricks/bundles/pipelines/_models/workspace_storage_info.py
index 29c075ac2f..7b813666a3 100644
--- a/python/databricks/bundles/pipelines/_models/workspace_storage_info.py
+++ b/python/databricks/bundles/pipelines/_models/workspace_storage_info.py
@@ -17,7 +17,7 @@ class WorkspaceStorageInfo:
     destination: VariableOr[str]
     """
-    wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
+    Required. wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
     """

     @classmethod
@@ -33,7 +33,7 @@ class WorkspaceStorageInfoDict(TypedDict, total=False):
     destination: VariableOr[str]
     """
-    wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
+    Required. wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`
     """
diff --git a/python/databricks/bundles/schemas/_models/schema.py b/python/databricks/bundles/schemas/_models/schema.py
index 58975f0474..113c24605d 100644
--- a/python/databricks/bundles/schemas/_models/schema.py
+++ b/python/databricks/bundles/schemas/_models/schema.py
@@ -26,12 +26,12 @@ class Schema(Resource):
     catalog_name: VariableOr[str]
     """
-    Name of parent catalog.
+    Required. Name of parent catalog.
     """

     name: VariableOr[str]
     """
-    Name of schema, relative to parent catalog.
+    Required. Name of schema, relative to parent catalog.
     """

     comment: VariableOrOptional[str] = None
@@ -66,12 +66,12 @@ class SchemaDict(TypedDict, total=False):
     catalog_name: VariableOr[str]
     """
-    Name of parent catalog.
+    Required. Name of parent catalog.
     """

     name: VariableOr[str]
     """
-    Name of schema, relative to parent catalog.
+    Required. Name of schema, relative to parent catalog.
     """

     comment: VariableOrOptional[str]
diff --git a/python/databricks/bundles/schemas/_models/schema_grant.py b/python/databricks/bundles/schemas/_models/schema_grant.py
index 4997db89c6..41b754228a 100644
--- a/python/databricks/bundles/schemas/_models/schema_grant.py
+++ b/python/databricks/bundles/schemas/_models/schema_grant.py
@@ -18,8 +18,14 @@ class SchemaGrant:
     """"""

     principal: VariableOr[str]
+    """
+    Required.
+    """

     privileges: VariableOrList[SchemaGrantPrivilege] = field(default_factory=list)
+    """
+    Required.
+    """

     @classmethod
     def from_dict(cls, value: "SchemaGrantDict") -> "Self":
@@ -33,8 +39,14 @@ class SchemaGrantDict(TypedDict, total=False):
     """"""

     principal: VariableOr[str]
+    """
+    Required.
+    """

     privileges: VariableOrList[SchemaGrantPrivilegeParam]
+    """
+    Required.
+    """


 SchemaGrantParam = SchemaGrantDict | SchemaGrant
diff --git a/python/databricks/bundles/schemas/_models/schema_grant_privilege.py b/python/databricks/bundles/schemas/_models/schema_grant_privilege.py
index b0dba59b7d..dda821874d 100644
--- a/python/databricks/bundles/schemas/_models/schema_grant_privilege.py
+++ b/python/databricks/bundles/schemas/_models/schema_grant_privilege.py
@@ -3,6 +3,10 @@
 class SchemaGrantPrivilege(Enum):
+    """
+    Valid values are: `ALL_PRIVILEGES`, `APPLY_TAG`, `CREATE_FUNCTION`, `CREATE_TABLE`, `CREATE_VOLUME`, `MANAGE`, `USE_SCHEMA`, `EXECUTE`, `MODIFY`, `REFRESH`, `SELECT`, `READ_VOLUME`, and `WRITE_VOLUME`.
+    """
+
     ALL_PRIVILEGES = "ALL_PRIVILEGES"
     APPLY_TAG = "APPLY_TAG"
     CREATE_FUNCTION = "CREATE_FUNCTION"
diff --git a/python/databricks/bundles/volumes/_models/volume.py b/python/databricks/bundles/volumes/_models/volume.py
index 20132cca96..9f99605e10 100644
--- a/python/databricks/bundles/volumes/_models/volume.py
+++ b/python/databricks/bundles/volumes/_models/volume.py
@@ -26,17 +26,17 @@ class Volume(Resource):
     catalog_name: VariableOr[str]
     """
-    The name of the catalog where the schema and the volume are
+    Required. The name of the catalog where the schema and the volume are
     """

     name: VariableOr[str]
     """
-    The name of the volume
+    Required. The name of the volume
     """

     schema_name: VariableOr[str]
     """
-    The name of the schema where the volume is
+    Required. The name of the schema where the volume is
     """

     comment: VariableOrOptional[str] = None
@@ -71,17 +71,17 @@ class VolumeDict(TypedDict, total=False):
     catalog_name: VariableOr[str]
     """
-    The name of the catalog where the schema and the volume are
+    Required. The name of the catalog where the schema and the volume are
     """

     name: VariableOr[str]
     """
-    The name of the volume
+    Required. The name of the volume
     """

     schema_name: VariableOr[str]
     """
-    The name of the schema where the volume is
+    Required. The name of the schema where the volume is
     """

     comment: VariableOrOptional[str]
diff --git a/python/databricks/bundles/volumes/_models/volume_grant.py b/python/databricks/bundles/volumes/_models/volume_grant.py
index 3bd580371e..55d31e48af 100644
--- a/python/databricks/bundles/volumes/_models/volume_grant.py
+++ b/python/databricks/bundles/volumes/_models/volume_grant.py
@@ -18,8 +18,14 @@ class VolumeGrant:
     """"""

     principal: VariableOr[str]
+    """
+    Required.
+    """

     privileges: VariableOrList[VolumeGrantPrivilege] = field(default_factory=list)
+    """
+    Required.
+    """

     @classmethod
     def from_dict(cls, value: "VolumeGrantDict") -> "Self":
@@ -33,8 +39,14 @@ class VolumeGrantDict(TypedDict, total=False):
     """"""

     principal: VariableOr[str]
+    """
+    Required.
+    """

     privileges: VariableOrList[VolumeGrantPrivilegeParam]
+    """
+    Required.
+    """


 VolumeGrantParam = VolumeGrantDict | VolumeGrant
diff --git a/python/databricks/bundles/volumes/_models/volume_grant_privilege.py b/python/databricks/bundles/volumes/_models/volume_grant_privilege.py
index d758ca6d82..8cf39a80ab 100644
--- a/python/databricks/bundles/volumes/_models/volume_grant_privilege.py
+++ b/python/databricks/bundles/volumes/_models/volume_grant_privilege.py
@@ -3,6 +3,10 @@
 class VolumeGrantPrivilege(Enum):
+    """
+    Valid values are: `ALL_PRIVILEGES`, `APPLY_TAG`, `MANAGE`, `READ_VOLUME`, and `WRITE_VOLUME`.
+    """
+
     ALL_PRIVILEGES = "ALL_PRIVILEGES"
     APPLY_TAG = "APPLY_TAG"
     MANAGE = "MANAGE"
diff --git a/python/databricks/bundles/volumes/_models/volume_type.py b/python/databricks/bundles/volumes/_models/volume_type.py
index 5c96db8fde..2733a31e17 100644
--- a/python/databricks/bundles/volumes/_models/volume_type.py
+++ b/python/databricks/bundles/volumes/_models/volume_type.py
@@ -3,6 +3,10 @@
 class VolumeType(Enum):
+    """
+    Valid values are: `MANAGED` and `EXTERNAL`.
+    """
+
     MANAGED = "MANAGED"
     EXTERNAL = "EXTERNAL"