diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 5d157b67c..74a1330dc 100755
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-2ac9d66a86b8772814266c0794730e62719ab299
\ No newline at end of file
+1455a7a0955cf9d56364646fe54cdc1b143a2829
\ No newline at end of file
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
old mode 100644
new mode 100755
index 4a01a0358..9f44d8ca5
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -13,3 +13,7 @@
 ### Internal Changes
 
 ### API Changes
+* Add `dataframeSchema`, `filterCondition` and `transformationSql` fields for `com.databricks.sdk.service.ml.DeltaTableSource`.
+* Add `environmentVersion` field for `com.databricks.sdk.service.pipelines.PipelinesEnvironment`.
+* Add `resetCheckpointSelection` field for `com.databricks.sdk.service.pipelines.StartUpdate`.
+* [Breaking] Remove `oauth2AppClientId` and `oauth2AppIntegrationId` fields for `com.databricks.sdk.service.apps.Space`.
\ No newline at end of file
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Space.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Space.java
index 03b001f82..4f53e29dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Space.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/Space.java
@@ -42,14 +42,6 @@ public class Space {
   @JsonProperty("name")
   private String name;
 
-  /** The OAuth2 app client ID for the app space. */
-  @JsonProperty("oauth2_app_client_id")
-  private String oauth2AppClientId;
-
-  /** The OAuth2 app integration ID for the app space. */
-  @JsonProperty("oauth2_app_integration_id")
-  private String oauth2AppIntegrationId;
-
   /**
    * Resources for the app space. Resources configured at the space level are available to all apps
    * in the space.
@@ -152,24 +144,6 @@ public String getName() {
     return name;
   }
 
-  public Space setOauth2AppClientId(String oauth2AppClientId) {
-    this.oauth2AppClientId = oauth2AppClientId;
-    return this;
-  }
-
-  public String getOauth2AppClientId() {
-    return oauth2AppClientId;
-  }
-
-  public Space setOauth2AppIntegrationId(String oauth2AppIntegrationId) {
-    this.oauth2AppIntegrationId = oauth2AppIntegrationId;
-    return this;
-  }
-
-  public String getOauth2AppIntegrationId() {
-    return oauth2AppIntegrationId;
-  }
-
   public Space setResources(Collection<AppResource> resources) {
     this.resources = resources;
     return this;
@@ -263,8 +237,6 @@ public boolean equals(Object o) {
         && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes)
         && Objects.equals(id, that.id)
         && Objects.equals(name, that.name)
-        && Objects.equals(oauth2AppClientId, that.oauth2AppClientId)
-        && Objects.equals(oauth2AppIntegrationId, that.oauth2AppIntegrationId)
         && Objects.equals(resources, that.resources)
         && Objects.equals(servicePrincipalClientId, that.servicePrincipalClientId)
         && Objects.equals(servicePrincipalId, that.servicePrincipalId)
@@ -286,8 +258,6 @@ public int hashCode() {
         effectiveUserApiScopes,
         id,
         name,
-        oauth2AppClientId,
-        oauth2AppIntegrationId,
         resources,
         servicePrincipalClientId,
         servicePrincipalId,
@@ -309,8 +279,6 @@ public String toString() {
         .add("effectiveUserApiScopes", effectiveUserApiScopes)
         .add("id", id)
         .add("name", name)
-        .add("oauth2AppClientId", oauth2AppClientId)
-        .add("oauth2AppIntegrationId", oauth2AppIntegrationId)
         .add("resources", resources)
         .add("servicePrincipalClientId", servicePrincipalClientId)
         .add("servicePrincipalId", servicePrincipalId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index 91046f686..9cd70ad33 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
 
 import com.databricks.sdk.support.Generated;
 
-/** Latest kind: CONNECTION_JDBC_OAUTH_M2M = 298; Next id: 299 */
+/** Latest kind: EXTERNAL_LOCATION_ONELAKE_MANAGED = 299; Next id: 300 */
 @Generated
 public enum SecurableKind {
   TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java
index 1c2130436..421e6aabe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeltaTableSource.java
@@ -10,10 +10,25 @@
 
 @Generated
 public class DeltaTableSource {
+  /**
+   * Schema of the resulting dataframe after transformations, in Spark StructType JSON format (from
+   * df.schema.json()). Required if transformation_sql is specified. Example:
+   * {"type":"struct","fields":[{"name":"col_a","type":"integer","nullable":true,"metadata":{}},{"name":"col_c","type":"integer","nullable":true,"metadata":{}}]}
+   */
+  @JsonProperty("dataframe_schema")
+  private String dataframeSchema;
+
   /** The entity columns of the Delta table. */
   @JsonProperty("entity_columns")
   private Collection<String> entityColumns;
 
+  /**
+   * Single WHERE clause to filter delta table before applying transformations. Will be row-wise
+   * evaluated, so should only include conditionals and projections.
+   */
+  @JsonProperty("filter_condition")
+  private String filterCondition;
+
   /** The full three-part (catalog, schema, table) name of the Delta table. */
   @JsonProperty("full_name")
   private String fullName;
@@ -22,6 +37,24 @@ public class DeltaTableSource {
   @JsonProperty("timeseries_column")
   private String timeseriesColumn;
 
+  /**
+   * A single SQL SELECT expression applied after filter_condition. Should contains all the columns
+   * needed (eg. "SELECT *, col_a + col_b AS col_c FROM x.y.z WHERE col_a > 0" would have
+   * `transformation_sql` "*, col_a + col_b AS col_c") If transformation_sql is not provided, all
+   * columns of the delta table are present in the DataSource dataframe.
+   */
+  @JsonProperty("transformation_sql")
+  private String transformationSql;
+
+  public DeltaTableSource setDataframeSchema(String dataframeSchema) {
+    this.dataframeSchema = dataframeSchema;
+    return this;
+  }
+
+  public String getDataframeSchema() {
+    return dataframeSchema;
+  }
+
   public DeltaTableSource setEntityColumns(Collection<String> entityColumns) {
     this.entityColumns = entityColumns;
     return this;
@@ -31,6 +64,15 @@ public Collection<String> getEntityColumns() {
     return entityColumns;
   }
 
+  public DeltaTableSource setFilterCondition(String filterCondition) {
+    this.filterCondition = filterCondition;
+    return this;
+  }
+
+  public String getFilterCondition() {
+    return filterCondition;
+  }
+
   public DeltaTableSource setFullName(String fullName) {
     this.fullName = fullName;
     return this;
@@ -49,27 +91,48 @@ public String getTimeseriesColumn() {
     return timeseriesColumn;
   }
 
+  public DeltaTableSource setTransformationSql(String transformationSql) {
+    this.transformationSql = transformationSql;
+    return this;
+  }
+
+  public String getTransformationSql() {
+    return transformationSql;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
     DeltaTableSource that = (DeltaTableSource) o;
-    return Objects.equals(entityColumns, that.entityColumns)
+    return Objects.equals(dataframeSchema, that.dataframeSchema)
+        && Objects.equals(entityColumns, that.entityColumns)
+        && Objects.equals(filterCondition, that.filterCondition)
         && Objects.equals(fullName, that.fullName)
-        && Objects.equals(timeseriesColumn, that.timeseriesColumn);
+        && Objects.equals(timeseriesColumn, that.timeseriesColumn)
+        && Objects.equals(transformationSql, that.transformationSql);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hash(entityColumns, fullName, timeseriesColumn);
+    return Objects.hash(
+        dataframeSchema,
+        entityColumns,
+        filterCondition,
+        fullName,
+        timeseriesColumn,
+        transformationSql);
   }
 
   @Override
   public String toString() {
     return new ToStringer(DeltaTableSource.class)
+        .add("dataframeSchema", dataframeSchema)
         .add("entityColumns", entityColumns)
+        .add("filterCondition", filterCondition)
         .add("fullName", fullName)
         .add("timeseriesColumn", timeseriesColumn)
+        .add("transformationSql", transformationSql)
         .toString();
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java
index 4f9146503..88c63f2b7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesEnvironment.java
@@ -25,6 +25,22 @@ public class PipelinesEnvironment {
   @JsonProperty("dependencies")
   private Collection<String> dependencies;
 
+  /**
+   * The environment version of the serverless Python environment used to execute customer Python
+   * code. Each environment version includes a specific Python version and a curated set of
+   * pre-installed libraries with defined versions, providing a stable and reproducible execution
+   * environment.
+   *
+   * <p>Databricks supports a three-year lifecycle for each environment version. For available
+   * versions and their included packages, see
+   * https://docs.databricks.com/aws/en/release-notes/serverless/environment-version/
+   *
+   * <p>The value should be a string representing the environment version number, for example:
+   * `"4"`.
+   */
+  @JsonProperty("environment_version")
+  private String environmentVersion;
+
   public PipelinesEnvironment setDependencies(Collection<String> dependencies) {
     this.dependencies = dependencies;
     return this;
@@ -34,21 +50,34 @@ public Collection<String> getDependencies() {
     return dependencies;
   }
 
+  public PipelinesEnvironment setEnvironmentVersion(String environmentVersion) {
+    this.environmentVersion = environmentVersion;
+    return this;
+  }
+
+  public String getEnvironmentVersion() {
+    return environmentVersion;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
     PipelinesEnvironment that = (PipelinesEnvironment) o;
-    return Objects.equals(dependencies, that.dependencies);
+    return Objects.equals(dependencies, that.dependencies)
+        && Objects.equals(environmentVersion, that.environmentVersion);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hash(dependencies);
+    return Objects.hash(dependencies, environmentVersion);
   }
 
   @Override
   public String toString() {
-    return new ToStringer(PipelinesEnvironment.class).add("dependencies", dependencies).toString();
+    return new ToStringer(PipelinesEnvironment.class)
+        .add("dependencies", dependencies)
+        .add("environmentVersion", environmentVersion)
+        .toString();
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java
index f84740ebb..ddb29dcd7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java
@@ -50,6 +50,14 @@ public class StartUpdate {
   @JsonProperty("replace_where_overrides")
   private Collection<ReplaceWhereOverride> replaceWhereOverrides;
 
+  /**
+   * A list of flows for which this update should reset the streaming checkpoint. This selection
+   * will not clear the data in the flow's target table. Flows in this list may also appear in
+   * refresh_selection and full_refresh_selection.
+   */
+  @JsonProperty("reset_checkpoint_selection")
+  private Collection<String> resetCheckpointSelection;
+
   /**
    * The information about the requested rewind operation. If specified this is a rewind mode
    * update.
@@ -128,6 +136,15 @@ public Collection<ReplaceWhereOverride> getReplaceWhereOverrides() {
     return replaceWhereOverrides;
   }
 
+  public StartUpdate setResetCheckpointSelection(Collection<String> resetCheckpointSelection) {
+    this.resetCheckpointSelection = resetCheckpointSelection;
+    return this;
+  }
+
+  public Collection<String> getResetCheckpointSelection() {
+    return resetCheckpointSelection;
+  }
+
   public StartUpdate setRewindSpec(RewindSpec rewindSpec) {
     this.rewindSpec = rewindSpec;
     return this;
@@ -158,6 +175,7 @@ public boolean equals(Object o) {
         && Objects.equals(pipelineId, that.pipelineId)
         && Objects.equals(refreshSelection, that.refreshSelection)
         && Objects.equals(replaceWhereOverrides, that.replaceWhereOverrides)
+        && Objects.equals(resetCheckpointSelection, that.resetCheckpointSelection)
         && Objects.equals(rewindSpec, that.rewindSpec)
         && Objects.equals(validateOnly, that.validateOnly);
   }
@@ -172,6 +190,7 @@ public int hashCode() {
         pipelineId,
         refreshSelection,
         replaceWhereOverrides,
+        resetCheckpointSelection,
         rewindSpec,
         validateOnly);
   }
@@ -186,6 +205,7 @@ public String toString() {
         .add("pipelineId", pipelineId)
         .add("refreshSelection", refreshSelection)
         .add("replaceWhereOverrides", replaceWhereOverrides)
+        .add("resetCheckpointSelection", resetCheckpointSelection)
         .add("rewindSpec", rewindSpec)
         .add("validateOnly", validateOnly)
         .toString();