diff --git a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/session_templates.proto b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/session_templates.proto
index 2e0181354cf..9b5c1090ac2 100644
--- a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/session_templates.proto
+++ b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/session_templates.proto
@@ -182,6 +182,10 @@ message SessionTemplate {
   oneof session_config {
     // Optional. Jupyter session config.
     JupyterConfig jupyter_session = 3 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. Spark Connect session config.
+    SparkConnectConfig spark_connect_session = 11
+        [(google.api.field_behavior) = OPTIONAL];
   }
 
   // Output only. The email address of the user who created the template.
diff --git a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/sessions.proto b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/sessions.proto
index 59f9c43064a..8e836d79ebe 100644
--- a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/sessions.proto
+++ b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/sessions.proto
@@ -163,12 +163,13 @@ message ListSessionsRequest {
   // A filter is a logical expression constraining the values of various fields
   // in each session resource. Filters are case sensitive, and may contain
   // multiple clauses combined with logical operators (AND, OR).
-  // Supported fields are `session_id`, `session_uuid`, `state`, and
-  // `create_time`.
+  // Supported fields are `session_id`, `session_uuid`, `state`, `create_time`,
+  // and `labels`.
   //
   // Example: `state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`
   // is a filter for sessions in an ACTIVE state that were created before
-  // 2023-01-01.
+  // 2023-01-01. `state = ACTIVE and labels.environment = production` is a
+  // filter for sessions in an ACTIVE state that have a production environment
+  // label.
   //
   // See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed
   // description of the filter syntax and a list of supported comparators.
@@ -289,6 +290,10 @@ message Session {
   oneof session_config {
     // Optional. Jupyter session config.
     JupyterConfig jupyter_session = 4 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. Spark Connect session config.
+    SparkConnectConfig spark_connect_session = 17
+        [(google.api.field_behavior) = OPTIONAL];
   }
 
   // Output only. Runtime information about session execution.
@@ -369,3 +374,6 @@ message JupyterConfig {
   // Optional. Display name, shown in the Jupyter kernelspec card.
   string display_name = 2 [(google.api.field_behavior) = OPTIONAL];
 }
+
+// Spark Connect configuration for an interactive session.
+message SparkConnectConfig {}
diff --git a/packages/google-cloud-dataproc/protos/protos.d.ts b/packages/google-cloud-dataproc/protos/protos.d.ts
index 79e15480693..37263eacb5c 100644
--- a/packages/google-cloud-dataproc/protos/protos.d.ts
+++ b/packages/google-cloud-dataproc/protos/protos.d.ts
@@ -15702,6 +15702,9 @@ export namespace google {
                     /** SessionTemplate jupyterSession */
                     jupyterSession?: (google.cloud.dataproc.v1.IJupyterConfig|null);
 
+                    /** SessionTemplate sparkConnectSession */
+                    sparkConnectSession?: (google.cloud.dataproc.v1.ISparkConnectConfig|null);
+
                     /** SessionTemplate creator */
                     creator?: (string|null);
 
@@ -15742,6 +15745,9 @@ export namespace google {
                     /** SessionTemplate jupyterSession. */
                     public jupyterSession?: (google.cloud.dataproc.v1.IJupyterConfig|null);
 
+                    /** SessionTemplate sparkConnectSession. */
+                    public sparkConnectSession?: (google.cloud.dataproc.v1.ISparkConnectConfig|null);
+
                     /** SessionTemplate creator. */
                     public creator: string;
 
@@ -15761,7 +15767,7 @@ export namespace google {
                     public uuid: string;
 
                     /** SessionTemplate sessionConfig. */
-                    public sessionConfig?: "jupyterSession";
+                    public sessionConfig?: ("jupyterSession"|"sparkConnectSession");
 
                     /**
                      * Creates a new SessionTemplate instance using the specified properties.
@@ -16621,6 +16627,9 @@ export namespace google {
                     /** Session jupyterSession */
                     jupyterSession?: (google.cloud.dataproc.v1.IJupyterConfig|null);
 
+                    /** Session sparkConnectSession */
+                    sparkConnectSession?: (google.cloud.dataproc.v1.ISparkConnectConfig|null);
+
                     /** Session runtimeInfo */
                     runtimeInfo?: (google.cloud.dataproc.v1.IRuntimeInfo|null);
 
@@ -16676,6 +16685,9 @@ export namespace google {
                     /** Session jupyterSession. */
                     public jupyterSession?: (google.cloud.dataproc.v1.IJupyterConfig|null);
 
+                    /** Session sparkConnectSession. */
+                    public sparkConnectSession?: (google.cloud.dataproc.v1.ISparkConnectConfig|null);
+
                     /** Session runtimeInfo. */
                     public runtimeInfo?: (google.cloud.dataproc.v1.IRuntimeInfo|null);
 
@@ -16710,7 +16722,7 @@ export namespace google {
                     public sessionTemplate: string;
 
                     /** Session sessionConfig. */
-                    public sessionConfig?: "jupyterSession";
+                    public sessionConfig?: ("jupyterSession"|"sparkConnectSession");
 
                     /**
                      * Creates a new Session instance using the specified properties.
@@ -17025,6 +17037,97 @@ export namespace google {
                     }
                 }
 
+                /** Properties of a SparkConnectConfig. */
+                interface ISparkConnectConfig {
+                }
+
+                /** Represents a SparkConnectConfig. */
+                class SparkConnectConfig implements ISparkConnectConfig {
+
+                    /**
+                     * Constructs a new SparkConnectConfig.
+                     * @param [properties] Properties to set
+                     */
+                    constructor(properties?: google.cloud.dataproc.v1.ISparkConnectConfig);
+
+                    /**
+                     * Creates a new SparkConnectConfig instance using the specified properties.
+                     * @param [properties] Properties to set
+                     * @returns SparkConnectConfig instance
+                     */
+                    public static create(properties?: google.cloud.dataproc.v1.ISparkConnectConfig): google.cloud.dataproc.v1.SparkConnectConfig;
+
+                    /**
+                     * Encodes the specified SparkConnectConfig message. Does not implicitly {@link google.cloud.dataproc.v1.SparkConnectConfig.verify|verify} messages.
+                     * @param message SparkConnectConfig message or plain object to encode
+                     * @param [writer] Writer to encode to
+                     * @returns Writer
+                     */
+                    public static encode(message: google.cloud.dataproc.v1.ISparkConnectConfig, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                    /**
+                     * Encodes the specified SparkConnectConfig message, length delimited. Does not implicitly {@link google.cloud.dataproc.v1.SparkConnectConfig.verify|verify} messages.
+                     * @param message SparkConnectConfig message or plain object to encode
+                     * @param [writer] Writer to encode to
+                     * @returns Writer
+                     */
+                    public static encodeDelimited(message: google.cloud.dataproc.v1.ISparkConnectConfig, writer?: $protobuf.Writer): $protobuf.Writer;
+
+                    /**
+                     * Decodes a SparkConnectConfig message from the specified reader or buffer.
+                     * @param reader Reader or buffer to decode from
+                     * @param [length] Message length if known beforehand
+                     * @returns SparkConnectConfig
+                     * @throws {Error} If the payload is not a reader or valid buffer
+                     * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                     */
+                    public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.dataproc.v1.SparkConnectConfig;
+
+                    /**
+                     * Decodes a SparkConnectConfig message from the specified reader or buffer, length delimited.
+                     * @param reader Reader or buffer to decode from
+                     * @returns SparkConnectConfig
+                     * @throws {Error} If the payload is not a reader or valid buffer
+                     * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                     */
+                    public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.dataproc.v1.SparkConnectConfig;
+
+                    /**
+                     * Verifies a SparkConnectConfig message.
+                     * @param message Plain object to verify
+                     * @returns `null` if valid, otherwise the reason why it is not
+                     */
+                    public static verify(message: { [k: string]: any }): (string|null);
+
+                    /**
+                     * Creates a SparkConnectConfig message from a plain object. Also converts values to their respective internal types.
+                     * @param object Plain object
+                     * @returns SparkConnectConfig
+                     */
+                    public static fromObject(object: { [k: string]: any }): google.cloud.dataproc.v1.SparkConnectConfig;
+
+                    /**
+                     * Creates a plain object from a SparkConnectConfig message. Also converts values to other types if specified.
+                     * @param message SparkConnectConfig
+                     * @param [options] Conversion options
+                     * @returns Plain object
+                     */
+                    public static toObject(message: google.cloud.dataproc.v1.SparkConnectConfig, options?: $protobuf.IConversionOptions): { [k: string]: any };
+
+                    /**
+                     * Converts this SparkConnectConfig to JSON.
+                     * @returns JSON object
+                     */
+                    public toJSON(): { [k: string]: any };
+
+                    /**
+                     * Gets the default type url for SparkConnectConfig
+                     * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com")
+                     * @returns The default type url
+                     */
+                    public static getTypeUrl(typeUrlPrefix?: string): string;
+                }
+
                 /** Represents a WorkflowTemplateService */
                 class WorkflowTemplateService extends $protobuf.rpc.Service {
 
diff --git a/packages/google-cloud-dataproc/protos/protos.js b/packages/google-cloud-dataproc/protos/protos.js
index 56b6a30027d..0b66cd3a96b 100644
--- a/packages/google-cloud-dataproc/protos/protos.js
+++ b/packages/google-cloud-dataproc/protos/protos.js
@@ -41292,6 +41292,7 @@
                          * @property {string|null} [description] SessionTemplate description
                          * @property {google.protobuf.ITimestamp|null} [createTime] SessionTemplate createTime
                          * @property {google.cloud.dataproc.v1.IJupyterConfig|null} [jupyterSession] SessionTemplate jupyterSession
+                         * @property {google.cloud.dataproc.v1.ISparkConnectConfig|null} [sparkConnectSession] SessionTemplate sparkConnectSession
                          * @property {string|null} [creator] SessionTemplate creator
                          * @property {Object.<string,string>|null} [labels] SessionTemplate labels
                          * @property {google.cloud.dataproc.v1.IRuntimeConfig|null} [runtimeConfig] SessionTemplate runtimeConfig
@@ -41348,6 +41349,14 @@
                          */
                         SessionTemplate.prototype.jupyterSession = null;
     
+                        /**
+                         * SessionTemplate sparkConnectSession.
+                         * @member {google.cloud.dataproc.v1.ISparkConnectConfig|null|undefined} sparkConnectSession
+                         * @memberof google.cloud.dataproc.v1.SessionTemplate
+                         * @instance
+                         */
+                        SessionTemplate.prototype.sparkConnectSession = null;
+    
                         /**
                          * SessionTemplate creator.
                          * @member {string} creator
@@ -41401,12 +41410,12 @@
     
                         /**
                          * SessionTemplate sessionConfig.
-                         * @member {"jupyterSession"|undefined} sessionConfig
+                         * @member {"jupyterSession"|"sparkConnectSession"|undefined} sessionConfig
                          * @memberof google.cloud.dataproc.v1.SessionTemplate
                          * @instance
                          */
                         Object.defineProperty(SessionTemplate.prototype, "sessionConfig", {
-                            get: $util.oneOfGetter($oneOfFields = ["jupyterSession"]),
+                            get: $util.oneOfGetter($oneOfFields = ["jupyterSession", "sparkConnectSession"]),
                             set: $util.oneOfSetter($oneOfFields)
                         });
     
@@ -41453,6 +41462,8 @@
                                 writer.uint32(/* id 9, wireType 2 =*/74).string(message.description);
                             if (message.updateTime != null && Object.hasOwnProperty.call(message, "updateTime"))
                                 $root.google.protobuf.Timestamp.encode(message.updateTime, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim();
+                            if (message.sparkConnectSession != null && Object.hasOwnProperty.call(message, "sparkConnectSession"))
+                                $root.google.cloud.dataproc.v1.SparkConnectConfig.encode(message.sparkConnectSession, writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();
                             if (message.uuid != null && Object.hasOwnProperty.call(message, "uuid"))
                                 writer.uint32(/* id 12, wireType 2 =*/98).string(message.uuid);
                             return writer;
@@ -41505,6 +41516,10 @@
                                         message.jupyterSession = $root.google.cloud.dataproc.v1.JupyterConfig.decode(reader, reader.uint32());
                                         break;
                                     }
+                                case 11: {
+                                        message.sparkConnectSession = $root.google.cloud.dataproc.v1.SparkConnectConfig.decode(reader, reader.uint32());
+                                        break;
+                                    }
                                 case 5: {
                                         message.creator = reader.string();
                                         break;
@@ -41603,6 +41618,16 @@
                                         return "jupyterSession." + error;
                                 }
                             }
+                            if (message.sparkConnectSession != null && message.hasOwnProperty("sparkConnectSession")) {
+                                if (properties.sessionConfig === 1)
+                                    return "sessionConfig: multiple values";
+                                properties.sessionConfig = 1;
+                                {
+                                    var error = $root.google.cloud.dataproc.v1.SparkConnectConfig.verify(message.sparkConnectSession);
+                                    if (error)
+                                        return "sparkConnectSession." + error;
+                                }
+                            }
                             if (message.creator != null && message.hasOwnProperty("creator"))
                                 if (!$util.isString(message.creator))
                                     return "creator: string expected";
@@ -41661,6 +41686,11 @@
                                     throw TypeError(".google.cloud.dataproc.v1.SessionTemplate.jupyterSession: object expected");
                                 message.jupyterSession = $root.google.cloud.dataproc.v1.JupyterConfig.fromObject(object.jupyterSession);
                             }
+                            if (object.sparkConnectSession != null) {
+                                if (typeof object.sparkConnectSession !== "object")
+                                    throw TypeError(".google.cloud.dataproc.v1.SessionTemplate.sparkConnectSession: object expected");
+                                message.sparkConnectSession = $root.google.cloud.dataproc.v1.SparkConnectConfig.fromObject(object.sparkConnectSession);
+                            }
                             if (object.creator != null)
                                 message.creator = String(object.creator);
                             if (object.labels) {
@@ -41740,6 +41770,11 @@
                                 object.description = message.description;
                             if (message.updateTime != null && message.hasOwnProperty("updateTime"))
                                 object.updateTime = $root.google.protobuf.Timestamp.toObject(message.updateTime, options);
+                            if (message.sparkConnectSession != null && message.hasOwnProperty("sparkConnectSession")) {
+                                object.sparkConnectSession = $root.google.cloud.dataproc.v1.SparkConnectConfig.toObject(message.sparkConnectSession, options);
+                                if (options.oneofs)
+                                    object.sessionConfig = "sparkConnectSession";
+                            }
                             if (message.uuid != null && message.hasOwnProperty("uuid"))
                                 object.uuid = message.uuid;
                             return object;
@@ -43440,6 +43475,7 @@
                          * @property {string|null} [uuid] Session uuid
                          * @property {google.protobuf.ITimestamp|null} [createTime] Session createTime
                          * @property {google.cloud.dataproc.v1.IJupyterConfig|null} [jupyterSession] Session jupyterSession
+                         * @property {google.cloud.dataproc.v1.ISparkConnectConfig|null} [sparkConnectSession] Session sparkConnectSession
                          * @property {google.cloud.dataproc.v1.IRuntimeInfo|null} [runtimeInfo] Session runtimeInfo
                          * @property {google.cloud.dataproc.v1.Session.State|null} [state] Session state
                          * @property {string|null} [stateMessage] Session stateMessage
@@ -43502,6 +43538,14 @@
                          */
                         Session.prototype.jupyterSession = null;
     
+                        /**
+                         * Session sparkConnectSession.
+                         * @member {google.cloud.dataproc.v1.ISparkConnectConfig|null|undefined} sparkConnectSession
+                         * @memberof google.cloud.dataproc.v1.Session
+                         * @instance
+                         */
+                        Session.prototype.sparkConnectSession = null;
+    
                         /**
                          * Session runtimeInfo.
                          * @member {google.cloud.dataproc.v1.IRuntimeInfo|null|undefined} runtimeInfo
@@ -43595,12 +43639,12 @@
     
                         /**
                          * Session sessionConfig.
-                         * @member {"jupyterSession"|undefined} sessionConfig
+                         * @member {"jupyterSession"|"sparkConnectSession"|undefined} sessionConfig
                          * @memberof google.cloud.dataproc.v1.Session
                          * @instance
                          */
                         Object.defineProperty(Session.prototype, "sessionConfig", {
-                            get: $util.oneOfGetter($oneOfFields = ["jupyterSession"]),
+                            get: $util.oneOfGetter($oneOfFields = ["jupyterSession", "sparkConnectSession"]),
                             set: $util.oneOfSetter($oneOfFields)
                         });
     
@@ -43660,6 +43704,8 @@
                                     $root.google.cloud.dataproc.v1.Session.SessionStateHistory.encode(message.stateHistory[i], writer.uint32(/* id 15, wireType 2 =*/122).fork()).ldelim();
                             if (message.sessionTemplate != null && Object.hasOwnProperty.call(message, "sessionTemplate"))
                                 writer.uint32(/* id 16, wireType 2 =*/130).string(message.sessionTemplate);
+                            if (message.sparkConnectSession != null && Object.hasOwnProperty.call(message, "sparkConnectSession"))
+                                $root.google.cloud.dataproc.v1.SparkConnectConfig.encode(message.sparkConnectSession, writer.uint32(/* id 17, wireType 2 =*/138).fork()).ldelim();
                             return writer;
                         };
     
@@ -43710,6 +43756,10 @@
                                         message.jupyterSession = $root.google.cloud.dataproc.v1.JupyterConfig.decode(reader, reader.uint32());
                                         break;
                                     }
+                                case 17: {
+                                        message.sparkConnectSession = $root.google.cloud.dataproc.v1.SparkConnectConfig.decode(reader, reader.uint32());
+                                        break;
+                                    }
                                 case 6: {
                                         message.runtimeInfo = $root.google.cloud.dataproc.v1.RuntimeInfo.decode(reader, reader.uint32());
                                         break;
@@ -43830,6 +43880,16 @@
                                         return "jupyterSession." + error;
                                 }
                             }
+                            if (message.sparkConnectSession != null && message.hasOwnProperty("sparkConnectSession")) {
+                                if (properties.sessionConfig === 1)
+                                    return "sessionConfig: multiple values";
+                                properties.sessionConfig = 1;
+                                {
+                                    var error = $root.google.cloud.dataproc.v1.SparkConnectConfig.verify(message.sparkConnectSession);
+                                    if (error)
+                                        return "sparkConnectSession." + error;
+                                }
+                            }
                             if (message.runtimeInfo != null && message.hasOwnProperty("runtimeInfo")) {
                                 var error = $root.google.cloud.dataproc.v1.RuntimeInfo.verify(message.runtimeInfo);
                                 if (error)
@@ -43920,6 +43980,11 @@
                                     throw TypeError(".google.cloud.dataproc.v1.Session.jupyterSession: object expected");
                                 message.jupyterSession = $root.google.cloud.dataproc.v1.JupyterConfig.fromObject(object.jupyterSession);
                             }
+                            if (object.sparkConnectSession != null) {
+                                if (typeof object.sparkConnectSession !== "object")
+                                    throw TypeError(".google.cloud.dataproc.v1.Session.sparkConnectSession: object expected");
+                                message.sparkConnectSession = $root.google.cloud.dataproc.v1.SparkConnectConfig.fromObject(object.sparkConnectSession);
+                            }
                             if (object.runtimeInfo != null) {
                                 if (typeof object.runtimeInfo !== "object")
                                     throw TypeError(".google.cloud.dataproc.v1.Session.runtimeInfo: object expected");
@@ -44071,6 +44136,11 @@
                             }
                             if (message.sessionTemplate != null && message.hasOwnProperty("sessionTemplate"))
                                 object.sessionTemplate = message.sessionTemplate;
+                            if (message.sparkConnectSession != null && message.hasOwnProperty("sparkConnectSession")) {
+                                object.sparkConnectSession = $root.google.cloud.dataproc.v1.SparkConnectConfig.toObject(message.sparkConnectSession, options);
+                                if (options.oneofs)
+                                    object.sessionConfig = "sparkConnectSession";
+                            }
                             return object;
                         };
     
@@ -44686,6 +44756,181 @@
                         return JupyterConfig;
                     })();
     
+                    v1.SparkConnectConfig = (function() {
+    
+                        /**
+                         * Properties of a SparkConnectConfig.
+                         * @memberof google.cloud.dataproc.v1
+                         * @interface ISparkConnectConfig
+                         */
+    
+                        /**
+                         * Constructs a new SparkConnectConfig.
+                         * @memberof google.cloud.dataproc.v1
+                         * @classdesc Represents a SparkConnectConfig.
+                         * @implements ISparkConnectConfig
+                         * @constructor
+                         * @param {google.cloud.dataproc.v1.ISparkConnectConfig=} [properties] Properties to set
+                         */
+                        function SparkConnectConfig(properties) {
+                            if (properties)
+                                for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)
+                                    if (properties[keys[i]] != null)
+                                        this[keys[i]] = properties[keys[i]];
+                        }
+    
+                        /**
+                         * Creates a new SparkConnectConfig instance using the specified properties.
+                         * @function create
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {google.cloud.dataproc.v1.ISparkConnectConfig=} [properties] Properties to set
+                         * @returns {google.cloud.dataproc.v1.SparkConnectConfig} SparkConnectConfig instance
+                         */
+                        SparkConnectConfig.create = function create(properties) {
+                            return new SparkConnectConfig(properties);
+                        };
+    
+                        /**
+                         * Encodes the specified SparkConnectConfig message. Does not implicitly {@link google.cloud.dataproc.v1.SparkConnectConfig.verify|verify} messages.
+                         * @function encode
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {google.cloud.dataproc.v1.ISparkConnectConfig} message SparkConnectConfig message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        SparkConnectConfig.encode = function encode(message, writer) {
+                            if (!writer)
+                                writer = $Writer.create();
+                            return writer;
+                        };
+    
+                        /**
+                         * Encodes the specified SparkConnectConfig message, length delimited. Does not implicitly {@link google.cloud.dataproc.v1.SparkConnectConfig.verify|verify} messages.
+                         * @function encodeDelimited
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {google.cloud.dataproc.v1.ISparkConnectConfig} message SparkConnectConfig message or plain object to encode
+                         * @param {$protobuf.Writer} [writer] Writer to encode to
+                         * @returns {$protobuf.Writer} Writer
+                         */
+                        SparkConnectConfig.encodeDelimited = function encodeDelimited(message, writer) {
+                            return this.encode(message, writer).ldelim();
+                        };
+    
+                        /**
+                         * Decodes a SparkConnectConfig message from the specified reader or buffer.
+                         * @function decode
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @param {number} [length] Message length if known beforehand
+                         * @returns {google.cloud.dataproc.v1.SparkConnectConfig} SparkConnectConfig
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        SparkConnectConfig.decode = function decode(reader, length) {
+                            if (!(reader instanceof $Reader))
+                                reader = $Reader.create(reader);
+                            var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.dataproc.v1.SparkConnectConfig();
+                            while (reader.pos < end) {
+                                var tag = reader.uint32();
+                                switch (tag >>> 3) {
+                                default:
+                                    reader.skipType(tag & 7);
+                                    break;
+                                }
+                            }
+                            return message;
+                        };
+    
+                        /**
+                         * Decodes a SparkConnectConfig message from the specified reader or buffer, length delimited.
+                         * @function decodeDelimited
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from
+                         * @returns {google.cloud.dataproc.v1.SparkConnectConfig} SparkConnectConfig
+                         * @throws {Error} If the payload is not a reader or valid buffer
+                         * @throws {$protobuf.util.ProtocolError} If required fields are missing
+                         */
+                        SparkConnectConfig.decodeDelimited = function decodeDelimited(reader) {
+                            if (!(reader instanceof $Reader))
+                                reader = new $Reader(reader);
+                            return this.decode(reader, reader.uint32());
+                        };
+    
+                        /**
+                         * Verifies a SparkConnectConfig message.
+                         * @function verify
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {Object.<string,*>} message Plain object to verify
+                         * @returns {string|null} `null` if valid, otherwise the reason why it is not
+                         */
+                        SparkConnectConfig.verify = function verify(message) {
+                            if (typeof message !== "object" || message === null)
+                                return "object expected";
+                            return null;
+                        };
+    
+                        /**
+                         * Creates a SparkConnectConfig message from a plain object. Also converts values to their respective internal types.
+                         * @function fromObject
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {Object.<string,*>} object Plain object
+                         * @returns {google.cloud.dataproc.v1.SparkConnectConfig} SparkConnectConfig
+                         */
+                        SparkConnectConfig.fromObject = function fromObject(object) {
+                            if (object instanceof $root.google.cloud.dataproc.v1.SparkConnectConfig)
+                                return object;
+                            return new $root.google.cloud.dataproc.v1.SparkConnectConfig();
+                        };
+    
+                        /**
+                         * Creates a plain object from a SparkConnectConfig message. Also converts values to other types if specified.
+                         * @function toObject
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {google.cloud.dataproc.v1.SparkConnectConfig} message SparkConnectConfig
+                         * @param {$protobuf.IConversionOptions} [options] Conversion options
+                         * @returns {Object.<string,*>} Plain object
+                         */
+                        SparkConnectConfig.toObject = function toObject() {
+                            return {};
+                        };
+    
+                        /**
+                         * Converts this SparkConnectConfig to JSON.
+                         * @function toJSON
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @instance
+                         * @returns {Object.<string,*>} JSON object
+                         */
+                        SparkConnectConfig.prototype.toJSON = function toJSON() {
+                            return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
+                        };
+    
+                        /**
+                         * Gets the default type url for SparkConnectConfig
+                         * @function getTypeUrl
+                         * @memberof google.cloud.dataproc.v1.SparkConnectConfig
+                         * @static
+                         * @param {string} [typeUrlPrefix] your custom typeUrlPrefix (default "type.googleapis.com")
+                         * @returns {string} The default type url
+                         */
+                        SparkConnectConfig.getTypeUrl = function getTypeUrl(typeUrlPrefix) {
+                            if (typeUrlPrefix === undefined) {
+                                typeUrlPrefix = "type.googleapis.com";
+                            }
+                            return typeUrlPrefix + "/google.cloud.dataproc.v1.SparkConnectConfig";
+                        };
+    
+                        return SparkConnectConfig;
+                    })();
+    
                     v1.WorkflowTemplateService = (function() {
     
                         /**
diff --git a/packages/google-cloud-dataproc/protos/protos.json b/packages/google-cloud-dataproc/protos/protos.json
index 749c3a1ac01..6f24be22296 100644
--- a/packages/google-cloud-dataproc/protos/protos.json
+++ b/packages/google-cloud-dataproc/protos/protos.json
@@ -5350,7 +5350,8 @@
                       "oneofs": {
                         "sessionConfig": {
                           "oneof": [
-                            "jupyterSession"
+                            "jupyterSession",
+                            "sparkConnectSession"
                           ]
                         }
                       },
@@ -5383,6 +5384,13 @@
                             "(google.api.field_behavior)": "OPTIONAL"
                           }
                         },
+                        "sparkConnectSession": {
+                          "type": "SparkConnectConfig",
+                          "id": 11,
+                          "options": {
+                            "(google.api.field_behavior)": "OPTIONAL"
+                          }
+                        },
                         "creator": {
                           "type": "string",
                           "id": 5,
@@ -5694,7 +5702,8 @@
                       "oneofs": {
                         "sessionConfig": {
                           "oneof": [
-                            "jupyterSession"
+                            "jupyterSession",
+                            "sparkConnectSession"
                           ]
                         }
                       },
@@ -5727,6 +5736,13 @@
                             "(google.api.field_behavior)": "OPTIONAL"
                           }
                         },
+                        "sparkConnectSession": {
+                          "type": "SparkConnectConfig",
+                          "id": 17,
+                          "options": {
+                            "(google.api.field_behavior)": "OPTIONAL"
+                          }
+                        },
                         "runtimeInfo": {
                           "type": "RuntimeInfo",
                           "id": 6,
@@ -5873,6 +5889,9 @@
                         }
                       }
                     },
+                    "SparkConnectConfig": {
+                      "fields": {}
+                    },
                     "WorkflowTemplateService": {
                       "options": {
                         "(google.api.default_host)": "dataproc.googleapis.com",
diff --git a/packages/google-cloud-dataproc/samples/generated/v1/session_controller.list_sessions.js b/packages/google-cloud-dataproc/samples/generated/v1/session_controller.list_sessions.js
index 0a9afa1bc7f..40dde9d879c 100644
--- a/packages/google-cloud-dataproc/samples/generated/v1/session_controller.list_sessions.js
+++ b/packages/google-cloud-dataproc/samples/generated/v1/session_controller.list_sessions.js
@@ -47,11 +47,12 @@ function main(parent) {
    *  A filter is a logical expression constraining the values of various fields
    *  in each session resource. Filters are case sensitive, and may contain
    *  multiple clauses combined with logical operators (AND, OR).
-   *  Supported fields are `session_id`, `session_uuid`, `state`, and
-   *  `create_time`.
+   *  Supported fields are `session_id`, `session_uuid`, `state`, `create_time`,
+   *  and `labels`.
    *  Example: `state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`
    *  is a filter for sessions in an ACTIVE state that were created before
-   *  2023-01-01.
+   *  2023-01-01. `state = ACTIVE and labels.environment=production` is a filter
+   *  for sessions in an ACTIVE state that have a production environment label.
    *  See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed
    *  description of the filter syntax and a list of supported comparators.
    */
diff --git a/packages/google-cloud-dataproc/samples/generated/v1/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated/v1/snippet_metadata_google.cloud.dataproc.v1.json
index cefdc43a882..7dabdfcba50 100644
--- a/packages/google-cloud-dataproc/samples/generated/v1/snippet_metadata_google.cloud.dataproc.v1.json
+++ b/packages/google-cloud-dataproc/samples/generated/v1/snippet_metadata_google.cloud.dataproc.v1.json
@@ -1486,7 +1486,7 @@
             "segments": [
                 {
                     "start": 25,
-                    "end": 79,
+                    "end": 80,
                     "type": "FULL"
                 }
             ],
diff --git a/packages/google-cloud-dataproc/src/v1/session_controller_client.ts b/packages/google-cloud-dataproc/src/v1/session_controller_client.ts
index 3533995f1eb..d6d938f0021 100644
--- a/packages/google-cloud-dataproc/src/v1/session_controller_client.ts
+++ b/packages/google-cloud-dataproc/src/v1/session_controller_client.ts
@@ -1153,12 +1153,13 @@ export class SessionControllerClient {
    *   A filter is a logical expression constraining the values of various fields
    *   in each session resource. Filters are case sensitive, and may contain
    *   multiple clauses combined with logical operators (AND, OR).
-   *   Supported fields are `session_id`, `session_uuid`, `state`, and
-   *   `create_time`.
+   *   Supported fields are `session_id`, `session_uuid`, `state`, `create_time`,
+   *   and `labels`.
    *
    *   Example: `state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`
    *   is a filter for sessions in an ACTIVE state that were created before
-   *   2023-01-01.
+   *   2023-01-01. `state = ACTIVE and labels.environment=production` is a filter
+   *   for sessions in an ACTIVE state that have a production environment label.
    *
    *   See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed
    *   description of the filter syntax and a list of supported comparators.
@@ -1261,12 +1262,13 @@ export class SessionControllerClient {
    *   A filter is a logical expression constraining the values of various fields
    *   in each session resource. Filters are case sensitive, and may contain
    *   multiple clauses combined with logical operators (AND, OR).
-   *   Supported fields are `session_id`, `session_uuid`, `state`, and
-   *   `create_time`.
+   *   Supported fields are `session_id`, `session_uuid`, `state`, `create_time`,
+   *   and `labels`.
    *
    *   Example: `state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`
    *   is a filter for sessions in an ACTIVE state that were created before
-   *   2023-01-01.
+   *   2023-01-01. `state = ACTIVE and labels.environment=production` is a filter
+   *   for sessions in an ACTIVE state that have a production environment label.
    *
    *   See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed
    *   description of the filter syntax and a list of supported comparators.
@@ -1323,12 +1325,13 @@ export class SessionControllerClient {
    *   A filter is a logical expression constraining the values of various fields
    *   in each session resource. Filters are case sensitive, and may contain
    *   multiple clauses combined with logical operators (AND, OR).
-   *   Supported fields are `session_id`, `session_uuid`, `state`, and
-   *   `create_time`.
+   *   Supported fields are `session_id`, `session_uuid`, `state`, `create_time`,
+   *   and `labels`.
    *
    *   Example: `state = ACTIVE and create_time < "2023-01-01T00:00:00Z"`
    *   is a filter for sessions in an ACTIVE state that were created before
-   *   2023-01-01.
+   *   2023-01-01. `state = ACTIVE and labels.environment=production` is a filter
+   *   for sessions in an ACTIVE state that have a production environment label.
    *
    *   See https://google.aip.dev/assets/misc/ebnf-filtering.txt for a detailed
    *   description of the filter syntax and a list of supported comparators.