
Commit c7df014

xupefei authored and HyukjinKwon committed
[SPARK-49273][CONNECT][SQL] Origin support for Spark Connect Scala client
### What changes were proposed in this pull request?

This PR adds support for `Origin` to the Scala client of Spark Connect. The approach is to send the client `Origin` instance over the wire to the server, which will reconstruct the instance and attach it to the expression during transformation. By default, only the last element in the stack trace is sent to the server. This is in line with the Python implementation and can be controlled via a Spark config `spark.sql.stackTracesInDataFrameContext`.

One `Origin` message can contain only one of `PythonOrigin` and `JvmOrigin`. The former is filled by the Python client (#46789) and the latter by the Scala client (this PR).

### Why are the changes needed?

Close the gap between Classic & Connect.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

New tests.

### Was this patch authored or co-authored using generative AI tooling?

Yes, to generate protobuf messages.

Closes #49373 from xupefei/connect-origin.

Authored-by: Paddy Xu <xupaddy@gmail.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
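As a rough sketch (not part of the commit) of what the Scala client now puts on the wire: it fills the `JvmOrigin` arm of the `Origin` oneof, mirroring the `convertOrigin` helper added in `columnNodeSupport.scala` below. The class, method, and line values here are invented; per the description above, by default only the caller's last frame is sent.

```scala
import org.apache.spark.connect.proto

// Hypothetical single captured frame (names and line number are made up).
val frame = proto.StackTraceElement
  .newBuilder()
  .setDeclaringClass("com.example.MyApp")
  .setMethodName("main")
  .setFileName("MyApp.scala")
  .setLineNumber(42)

// The Scala client sets jvm_origin; python_origin is used only by the Python client.
val origin = proto.Origin
  .newBuilder()
  .setJvmOrigin(proto.JvmOrigin.newBuilder().addStackTrace(frame))
  .build()
```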
1 parent 95ec08e commit c7df014

1,303 files changed (+37,792 / -47 lines)


connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/columnNodeSupport.scala

Lines changed: 32 additions & 1 deletion
@@ -44,7 +44,6 @@ object ColumnNodeToProtoConverter extends (ColumnNode => proto.Expression) {
 
   private def apply(node: ColumnNode, e: Option[Encoder[_]]): proto.Expression = {
     val builder = proto.Expression.newBuilder()
-    // TODO(SPARK-49273) support Origin in Connect Scala Client.
     node match {
       case Literal(value, None, _) =>
         builder.setLiteral(toLiteralProtoBuilder(value))
@@ -189,6 +188,38 @@ object ColumnNodeToProtoConverter extends (ColumnNode => proto.Expression) {
       case node =>
         throw SparkException.internalError("Unsupported ColumnNode: " + node)
     }
+    if (node.origin != Origin()) {
+      builder.setCommon(proto.ExpressionCommon.newBuilder().setOrigin(convertOrigin(node.origin)))
+    }
+    builder.build()
+  }
+
+  private def convertOrigin(origin: Origin): proto.Origin = {
+    val jvmOrigin = proto.JvmOrigin.newBuilder()
+    origin.line.map(jvmOrigin.setLine)
+    origin.startPosition.map(jvmOrigin.setStartPosition)
+    origin.startIndex.map(jvmOrigin.setStartIndex)
+    origin.stopIndex.map(jvmOrigin.setStopIndex)
+    origin.sqlText.map(jvmOrigin.setSqlText)
+    origin.objectType.map(jvmOrigin.setObjectType)
+    origin.objectName.map(jvmOrigin.setObjectName)
+
+    origin.stackTrace
+      .map(_.map(convertStackTraceElement).toSeq.asJava)
+      .map(jvmOrigin.addAllStackTrace)
+
+    proto.Origin.newBuilder().setJvmOrigin(jvmOrigin).build()
+  }
+
+  private def convertStackTraceElement(stack: StackTraceElement): proto.StackTraceElement = {
+    val builder = proto.StackTraceElement.newBuilder()
+    Option(stack.getClassLoaderName).map(builder.setClassLoaderName)
+    Option(stack.getModuleName).map(builder.setModuleName)
+    Option(stack.getModuleVersion).map(builder.setModuleVersion)
+    Option(stack.getClassName).map(builder.setDeclaringClass)
+    Option(stack.getMethodName).map(builder.setMethodName)
+    Option(stack.getFileName).map(builder.setFileName)
+    Option(stack.getLineNumber).map(builder.setLineNumber)
     builder.build()
   }
 
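A rough usage sketch of the converter's new behaviour (not part of the commit; it assumes `ColumnNodeToProtoConverter`, `SqlExpression`, and the `org.apache.spark.sql.catalyst.trees.Origin` import are in scope, as in the test suite further down): a node carrying a non-empty `Origin` gets `common.origin.jvm_origin` attached, while a node with the default empty `Origin` is left without an `ExpressionCommon`.

```scala
import org.apache.spark.sql.catalyst.trees.Origin

// Hypothetical conversion of an origin-carrying node.
val withOrigin = ColumnNodeToProtoConverter(
  SqlExpression("1 + 1", Origin(line = Some(7), sqlText = Some("1 + 1"))))
assert(withOrigin.getCommon.getOrigin.hasJvmOrigin)

// An empty Origin skips the ExpressionCommon field entirely.
val withoutOrigin = ColumnNodeToProtoConverter(SqlExpression("1 + 1", Origin()))
assert(!withoutOrigin.hasCommon)
```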

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientDatasetSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -143,11 +143,12 @@ class ClientDatasetSuite extends ConnectFunSuite with BeforeAndAfterEach {
   test("write V2") {
     val df = ss.newDataFrame(_ => ()).limit(10)
 
+    val partCol = col("col99")
     val builder = proto.WriteOperationV2.newBuilder()
     builder
       .setInput(df.plan.getRoot)
       .setTableName("t1")
-      .addPartitioningColumns(toExpr(col("col99")))
+      .addPartitioningColumns(toExpr(partCol))
       .setProvider("json")
       .addClusteringColumns("col3")
       .putTableProperties("key", "value")
@@ -160,7 +161,7 @@ class ClientDatasetSuite extends ConnectFunSuite with BeforeAndAfterEach {
       .build()
 
     df.writeTo("t1")
-      .partitionedBy(col("col99"))
+      .partitionedBy(partCol)
       .clusterBy("col3")
       .using("json")
       .tableProperty("key", "value")

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameSubquerySuite.scala

Lines changed: 8 additions & 3 deletions
@@ -56,7 +56,8 @@ class DataFrameSubquerySuite extends QueryTest with RemoteSparkSession {
     checkError(
       intercept[AnalysisException](spark.range(1).select($"outer_col".outer()).collect()),
       "UNRESOLVED_COLUMN.WITH_SUGGESTION",
-      parameters = Map("objectName" -> "`outer_col`", "proposal" -> "`id`"))
+      parameters = Map("objectName" -> "`outer_col`", "proposal" -> "`id`"),
+      context = ExpectedContext(fragment = "$", callSitePattern = getCurrentClassCallSitePattern))
   }
 
   test("simple uncorrelated scalar subquery") {
@@ -637,14 +638,18 @@ class DataFrameSubquerySuite extends QueryTest with RemoteSparkSession {
       },
       "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.UNSUPPORTED_IN_EXISTS_SUBQUERY",
       parameters = Map("treeNode" -> "(?s)'Unpivot.*"),
-      matchPVals = true)
+      matchPVals = true,
+      queryContext = Array(
+        ExpectedContext(fragment = "exists", callSitePattern = getCurrentClassCallSitePattern)))
     checkError(
       intercept[AnalysisException] {
         t1.unpivot(Array($"c1"), Array(t2.exists()), "c1", "c2").collect()
       },
       "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.UNSUPPORTED_IN_EXISTS_SUBQUERY",
       parameters = Map("treeNode" -> "(?s)Expand.*"),
-      matchPVals = true)
+      matchPVals = true,
+      queryContext = Array(
+        ExpectedContext(fragment = "exists", callSitePattern = getCurrentClassCallSitePattern)))
   }
 }
 

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala

Lines changed: 48 additions & 3 deletions
@@ -24,6 +24,7 @@ import scala.collection.{immutable, mutable}
 import scala.jdk.CollectionConverters._
 import scala.util.{Failure, Success, Try}
 
+import com.google.protobuf
 import com.google.protobuf.util.JsonFormat
 import com.google.protobuf.util.JsonFormat.TypeRegistry
 import io.grpc.inprocess.InProcessChannelBuilder
@@ -146,7 +147,7 @@ class PlanGenerationTestSuite
   }
 
   private def test(name: String)(f: => Dataset[_]): Unit = super.test(name) {
-    val actual = f.plan.getRoot
+    val actual = trimJvmOriginFields(f.plan.getRoot)
     val goldenFile = queryFilePath.resolve(name.replace(' ', '_') + ".proto.bin")
     Try(readRelation(goldenFile)) match {
       case Success(expected) if expected == actual =>
@@ -158,10 +159,10 @@ class PlanGenerationTestSuite
         fail(s"""
            |Expected and actual plans do not match:
            |
-            |=== Expected Plan ===
+            |=== Expected Plan (with excess fields trimmed) ===
            |$expected
            |
-            |=== Actual Plan ===
+            |=== Actual Plan (with excess fields trimmed) ===
            |$actual
            |""".stripMargin)
       case Failure(_) if regenerateGoldenFiles =>
@@ -198,6 +199,50 @@ class PlanGenerationTestSuite
     }
   }
 
+  private def trimJvmOriginFields[T <: protobuf.Message](message: T): T = {
+    def trim(builder: proto.JvmOrigin.Builder): Unit = {
+      builder
+        .clearLine()
+        .clearStartPosition()
+        .clearStartIndex()
+        .clearStopIndex()
+      val trimmedStackTraces = builder.getStackTraceBuilderList.asScala.map { element =>
+        element.clearLineNumber()
+        if (element.getMethodName != null && element.getMethodName.startsWith("$anonfun")) {
+          // Anonymous functions contain a sequence ID that is not stable.
+          element.setMethodName("~~trimmed~anonfun~~")
+        }
+        element.build()
+      }
+      builder.clearStackTrace().addAllStackTrace(trimmedStackTraces.asJava)
+    }
+
+    val builder = message.toBuilder
+
+    builder match {
+      case exp: proto.Relation.Builder
+          if exp.hasCommon && exp.getCommon.hasOrigin && exp.getCommon.getOrigin.hasJvmOrigin =>
+        trim(exp.getCommonBuilder.getOriginBuilder.getJvmOriginBuilder)
+      case exp: proto.Expression.Builder
+          if exp.hasCommon && exp.getCommon.hasOrigin && exp.getCommon.getOrigin.hasJvmOrigin =>
+        trim(exp.getCommonBuilder.getOriginBuilder.getJvmOriginBuilder)
+      case _ => // Other stuff that does not have origin
+    }
+
+    builder.getAllFields.asScala.foreach {
+      case (desc, msg: protobuf.Message) =>
+        builder.setField(desc, trimJvmOriginFields(msg))
+      case (desc, list: java.util.List[_]) =>
+        val newList = list.asScala.map {
+          case msg: protobuf.Message => trimJvmOriginFields(msg)
+          case other => other // Primitive types
+        }
+        builder.setField(desc, newList.asJava)
+      case _ => // Primitive types
+    }
+    builder.build().asInstanceOf[T]
+  }
+
   private val urlWithUserAndPass = "jdbc:h2:mem:testdb0;user=testUser;password=testPass"
 
   private val simpleSchema = new StructType()
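A toy illustration of why `trimJvmOriginFields` clears these fields before comparing against golden files (not part of the commit; frame names and line numbers are invented, and `proto` is assumed to be the `org.apache.spark.connect.proto` alias used in the suite): the captured frames shift whenever the test file is edited or recompiled, but the trimmed form stays stable.

```scala
import org.apache.spark.connect.proto

// Two hypothetical captures of the same call site, taken from different source positions.
def frame(method: String, line: Int): proto.StackTraceElement.Builder =
  proto.StackTraceElement
    .newBuilder()
    .setDeclaringClass("org.apache.spark.sql.PlanGenerationTestSuite")
    .setMethodName(method)
    .setLineNumber(line)

// Same idea as the trimming above: drop the line number and collapse the
// unstable $anonfun name to a fixed token (both names here start with $anonfun).
def trimmed(b: proto.StackTraceElement.Builder): proto.StackTraceElement =
  b.clearLineNumber().setMethodName("~~trimmed~anonfun~~").build()

assert(trimmed(frame("$anonfun$new$42", 150)) == trimmed(frame("$anonfun$new$57", 163)))
```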

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/UserDefinedFunctionSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -30,13 +30,14 @@ class UserDefinedFunctionSuite extends ConnectFunSuite {
   test("udf and encoder serialization") {
     def func(x: Int): Int = x + 1
 
+    val dummyCol = Column("dummy")
     val myUdf = udf(func _)
-    val colWithUdf = myUdf(Column("dummy"))
+    val colWithUdf = myUdf(dummyCol)
 
     val udfExpr = toExpr(colWithUdf).getCommonInlineUserDefinedFunction
     assert(udfExpr.getDeterministic)
     assert(udfExpr.getArgumentsCount == 1)
-    assert(udfExpr.getArguments(0) == toExpr(Column("dummy")))
+    assert(udfExpr.getArguments(0) == toExpr(dummyCol))
     val udfObj = udfExpr.getScalarScalaUdf
 
     assert(!udfObj.getNullable)

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/internal/ColumnNodeToProtoConverterSuite.scala

Lines changed: 41 additions & 6 deletions
@@ -412,17 +412,30 @@ class ColumnNodeToProtoConverterSuite extends ConnectFunSuite {
         .setNullable(false)
         .setAggregate(true)))
 
-    val result = ColumnNodeToProtoConverter.toTypedExpr(
-      Column(InvokeInlineUserDefinedFunction(aggregator, Nil)),
-      PrimitiveLongEncoder)
-    val expected = expr(
-      _.getTypedAggregateExpressionBuilder.getScalarScalaUdfBuilder
+    val invokeColumn = Column(InvokeInlineUserDefinedFunction(aggregator, Nil))
+    val result = ColumnNodeToProtoConverter.toTypedExpr(invokeColumn, PrimitiveLongEncoder)
+    val expected = expr { builder =>
+      builder.getTypedAggregateExpressionBuilder.getScalarScalaUdfBuilder
         .setPayload(UdfToProtoUtils
          .toUdfPacketBytes(aggregator, PrimitiveLongEncoder :: Nil, PrimitiveLongEncoder))
        .addInputTypes(ProtoDataTypes.LongType)
        .setOutputType(ProtoDataTypes.LongType)
        .setNullable(true)
-        .setAggregate(true))
+        .setAggregate(true)
+      val origin = builder.getCommonBuilder.getOriginBuilder.getJvmOriginBuilder
+      invokeColumn.node.origin.stackTrace.map {
+        _.foreach { element =>
+          origin.addStackTrace(
+            proto.StackTraceElement
+              .newBuilder()
+              .setClassLoaderName(element.getClassLoaderName)
+              .setDeclaringClass(element.getClassName)
+              .setMethodName(element.getMethodName)
+              .setFileName(element.getFileName)
+              .setLineNumber(element.getLineNumber))
+        }
+      }
+    }
     assert(result == expected)
   }
 
@@ -434,6 +447,28 @@ class ColumnNodeToProtoConverterSuite extends ConnectFunSuite {
   test("unsupported") {
     intercept[SparkException](ColumnNodeToProtoConverter(Nope()))
   }
+
+  test("origin") {
+    val origin = Origin(
+      line = Some(1),
+      sqlText = Some("lol"),
+      stackTrace = Some(Array(new StackTraceElement("a", "b", "c", 9))))
+    testConversion(
+      SqlExpression("1 + 1", origin),
+      expr { builder =>
+        builder.getExpressionStringBuilder.setExpression("1 + 1")
+        builder.getCommonBuilder.getOriginBuilder.getJvmOriginBuilder
+          .setLine(1)
+          .setSqlText("lol")
+          .addStackTrace(
+            proto.StackTraceElement
+              .newBuilder()
+              .setDeclaringClass("a")
+              .setMethodName("b")
+              .setFileName("c")
+              .setLineNumber(9))
+      })
+  }
 }
 
 private[internal] case class Nope(override val origin: Origin = CurrentOrigin.get)

connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/QueryTest.scala

Lines changed: 2 additions & 1 deletion
@@ -205,7 +205,8 @@ abstract class QueryTest extends ConnectFunSuite with SQLHelper {
 
   protected def getCurrentClassCallSitePattern: String = {
     val cs = Thread.currentThread().getStackTrace()(2)
-    s"${cs.getClassName}\\..*\\(${cs.getFileName}:\\d+\\)"
+    // {classloader}//{class.name}({file_name.scala}:{line_number})
+    s".*//${cs.getClassName}\\..*\\(${cs.getFileName}:\\d+\\)"
   }
 
   /**
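For illustration (not part of the commit; the class name and frame string are invented, and `app//` is how the JVM's application class loader is rendered in stack frames on Java 9+), the broadened pattern now tolerates the class-loader prefix that shows up in the call sites carried by `JvmOrigin`:

```scala
// Shape produced by getCurrentClassCallSitePattern after this change:
//   .*//{class.name}\..*\({file_name.scala}:\d+\)
val pattern =
  ".*//org.apache.spark.sql.DataFrameSubquerySuite\\..*\\(DataFrameSubquerySuite.scala:\\d+\\)"

// A hypothetical call site as it appears in an error's query context.
val callSite =
  "app//org.apache.spark.sql.DataFrameSubquerySuite.$anonfun$new$1(DataFrameSubquerySuite.scala:57)"

assert(callSite.matches(pattern))
```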

python/pyspark/sql/connect/proto/common_pb2.py

Lines changed: 21 additions & 17 deletions
@@ -35,7 +35,7 @@
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddresses"\xc3\x01\n\x17\x45xecutorResourceRequest\x12#\n\rresource_name\x18\x01 \x01(\tR\x0cresourceName\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12.\n\x10\x64iscovery_script\x18\x03 \x01(\tH\x00R\x0f\x64iscoveryScript\x88\x01\x01\x12\x1b\n\x06vendor\x18\x04 \x01(\tH\x01R\x06vendor\x88\x01\x01\x42\x13\n\x11_discovery_scriptB\t\n\x07_vendor"R\n\x13TaskResourceRequest\x12#\n\rresource_name\x18\x01 \x01(\tR\x0cresourceName\x12\x16\n\x06\x61mount\x18\x02 \x01(\x01R\x06\x61mount"\xa5\x03\n\x0fResourceProfile\x12\x64\n\x12\x65xecutor_resources\x18\x01 \x03(\x0b\x32\x35.spark.connect.ResourceProfile.ExecutorResourcesEntryR\x11\x65xecutorResources\x12X\n\x0etask_resources\x18\x02 \x03(\x0b\x32\x31.spark.connect.ResourceProfile.TaskResourcesEntryR\rtaskResources\x1al\n\x16\x45xecutorResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12<\n\x05value\x18\x02 \x01(\x0b\x32&.spark.connect.ExecutorResourceRequestR\x05value:\x02\x38\x01\x1a\x64\n\x12TaskResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32".spark.connect.TaskResourceRequestR\x05value:\x02\x38\x01"X\n\x06Origin\x12\x42\n\rpython_origin\x18\x01 \x01(\x0b\x32\x1b.spark.connect.PythonOriginH\x00R\x0cpythonOriginB\n\n\x08\x66unction"G\n\x0cPythonOrigin\x12\x1a\n\x08\x66ragment\x18\x01 \x01(\tR\x08\x66ragment\x12\x1b\n\tcall_site\x18\x02 \x01(\tR\x08\x63\x61llSite"\x1f\n\x05\x42ools\x12\x16\n\x06values\x18\x01 \x03(\x08R\x06values"\x1e\n\x04Ints\x12\x16\n\x06values\x18\x01 \x03(\x05R\x06values"\x1f\n\x05Longs\x12\x16\n\x06values\x18\x01 \x03(\x03R\x06values" \n\x06\x46loats\x12\x16\n\x06values\x18\x01 \x03(\x02R\x06values"!\n\x07\x44oubles\x12\x16\n\x06values\x18\x01 \x03(\x01R\x06values"!\n\x07Strings\x12\x16\n\x06values\x18\x01 \x03(\tR\x06valuesB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
+    b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddresses"\xc3\x01\n\x17\x45xecutorResourceRequest\x12#\n\rresource_name\x18\x01 \x01(\tR\x0cresourceName\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12.\n\x10\x64iscovery_script\x18\x03 \x01(\tH\x00R\x0f\x64iscoveryScript\x88\x01\x01\x12\x1b\n\x06vendor\x18\x04 \x01(\tH\x01R\x06vendor\x88\x01\x01\x42\x13\n\x11_discovery_scriptB\t\n\x07_vendor"R\n\x13TaskResourceRequest\x12#\n\rresource_name\x18\x01 \x01(\tR\x0cresourceName\x12\x16\n\x06\x61mount\x18\x02 \x01(\x01R\x06\x61mount"\xa5\x03\n\x0fResourceProfile\x12\x64\n\x12\x65xecutor_resources\x18\x01 \x03(\x0b\x32\x35.spark.connect.ResourceProfile.ExecutorResourcesEntryR\x11\x65xecutorResources\x12X\n\x0etask_resources\x18\x02 \x03(\x0b\x32\x31.spark.connect.ResourceProfile.TaskResourcesEntryR\rtaskResources\x1al\n\x16\x45xecutorResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12<\n\x05value\x18\x02 \x01(\x0b\x32&.spark.connect.ExecutorResourceRequestR\x05value:\x02\x38\x01\x1a\x64\n\x12TaskResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32".spark.connect.TaskResourceRequestR\x05value:\x02\x38\x01"\x93\x01\n\x06Origin\x12\x42\n\rpython_origin\x18\x01 \x01(\x0b\x32\x1b.spark.connect.PythonOriginH\x00R\x0cpythonOrigin\x12\x39\n\njvm_origin\x18\x02 \x01(\x0b\x32\x18.spark.connect.JvmOriginH\x00R\tjvmOriginB\n\n\x08\x66unction"G\n\x0cPythonOrigin\x12\x1a\n\x08\x66ragment\x18\x01 \x01(\tR\x08\x66ragment\x12\x1b\n\tcall_site\x18\x02 \x01(\tR\x08\x63\x61llSite"\xb1\x03\n\tJvmOrigin\x12\x17\n\x04line\x18\x01 \x01(\x05H\x00R\x04line\x88\x01\x01\x12*\n\x0estart_position\x18\x02 \x01(\x05H\x01R\rstartPosition\x88\x01\x01\x12$\n\x0bstart_index\x18\x03 \x01(\x05H\x02R\nstartIndex\x88\x01\x01\x12"\n\nstop_index\x18\x04 \x01(\x05H\x03R\tstopIndex\x88\x01\x01\x12\x1e\n\x08sql_text\x18\x05 \x01(\tH\x04R\x07sqlText\x88\x01\x01\x12$\n\x0bobject_type\x18\x06 \x01(\tH\x05R\nobjectType\x88\x01\x01\x12$\n\x0bobject_name\x18\x07 \x01(\tH\x06R\nobjectName\x88\x01\x01\x12\x41\n\x0bstack_trace\x18\x08 \x03(\x0b\x32 .spark.connect.StackTraceElementR\nstackTraceB\x07\n\x05_lineB\x11\n\x0f_start_positionB\x0e\n\x0c_start_indexB\r\n\x0b_stop_indexB\x0b\n\t_sql_textB\x0e\n\x0c_object_typeB\x0e\n\x0c_object_name"\xea\x02\n\x11StackTraceElement\x12/\n\x11\x63lass_loader_name\x18\x01 \x01(\tH\x00R\x0f\x63lassLoaderName\x88\x01\x01\x12$\n\x0bmodule_name\x18\x02 \x01(\tH\x01R\nmoduleName\x88\x01\x01\x12*\n\x0emodule_version\x18\x03 \x01(\tH\x02R\rmoduleVersion\x88\x01\x01\x12\'\n\x0f\x64\x65\x63laring_class\x18\x04 \x01(\tR\x0e\x64\x65\x63laringClass\x12\x1f\n\x0bmethod_name\x18\x05 \x01(\tR\nmethodName\x12 \n\tfile_name\x18\x06 \x01(\tH\x03R\x08\x66ileName\x88\x01\x01\x12\x1f\n\x0bline_number\x18\x07 \x01(\x05R\nlineNumberB\x14\n\x12_class_loader_nameB\x0e\n\x0c_module_nameB\x11\n\x0f_module_versionB\x0c\n\n_file_name"\x1f\n\x05\x42ools\x12\x16\n\x06values\x18\x01 \x03(\x08R\x06values"\x1e\n\x04Ints\x12\x16\n\x06values\x18\x01 \x03(\x05R\x06values"\x1f\n\x05Longs\x12\x16\n\x06values\x18\x01 \x03(\x03R\x06values" \n\x06\x46loats\x12\x16\n\x06values\x18\x01 \x03(\x02R\x06values"!\n\x07\x44oubles\x12\x16\n\x06values\x18\x01 \x03(\x01R\x06values"!\n\x07Strings\x12\x16\n\x06values\x18\x01 \x03(\tR\x06valuesB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
 )
 
 _globals = globals()
@@ -66,20 +66,24 @@
     _globals["_RESOURCEPROFILE_EXECUTORRESOURCESENTRY"]._serialized_end = 899
     _globals["_RESOURCEPROFILE_TASKRESOURCESENTRY"]._serialized_start = 901
     _globals["_RESOURCEPROFILE_TASKRESOURCESENTRY"]._serialized_end = 1001
-    _globals["_ORIGIN"]._serialized_start = 1003
-    _globals["_ORIGIN"]._serialized_end = 1091
-    _globals["_PYTHONORIGIN"]._serialized_start = 1093
-    _globals["_PYTHONORIGIN"]._serialized_end = 1164
-    _globals["_BOOLS"]._serialized_start = 1166
-    _globals["_BOOLS"]._serialized_end = 1197
-    _globals["_INTS"]._serialized_start = 1199
-    _globals["_INTS"]._serialized_end = 1229
-    _globals["_LONGS"]._serialized_start = 1231
-    _globals["_LONGS"]._serialized_end = 1262
-    _globals["_FLOATS"]._serialized_start = 1264
-    _globals["_FLOATS"]._serialized_end = 1296
-    _globals["_DOUBLES"]._serialized_start = 1298
-    _globals["_DOUBLES"]._serialized_end = 1331
-    _globals["_STRINGS"]._serialized_start = 1333
-    _globals["_STRINGS"]._serialized_end = 1366
+    _globals["_ORIGIN"]._serialized_start = 1004
+    _globals["_ORIGIN"]._serialized_end = 1151
+    _globals["_PYTHONORIGIN"]._serialized_start = 1153
+    _globals["_PYTHONORIGIN"]._serialized_end = 1224
+    _globals["_JVMORIGIN"]._serialized_start = 1227
+    _globals["_JVMORIGIN"]._serialized_end = 1660
+    _globals["_STACKTRACEELEMENT"]._serialized_start = 1663
+    _globals["_STACKTRACEELEMENT"]._serialized_end = 2025
+    _globals["_BOOLS"]._serialized_start = 2027
+    _globals["_BOOLS"]._serialized_end = 2058
+    _globals["_INTS"]._serialized_start = 2060
+    _globals["_INTS"]._serialized_end = 2090
+    _globals["_LONGS"]._serialized_start = 2092
+    _globals["_LONGS"]._serialized_end = 2123
+    _globals["_FLOATS"]._serialized_start = 2125
+    _globals["_FLOATS"]._serialized_end = 2157
+    _globals["_DOUBLES"]._serialized_start = 2159
+    _globals["_DOUBLES"]._serialized_end = 2192
+    _globals["_STRINGS"]._serialized_start = 2194
+    _globals["_STRINGS"]._serialized_end = 2227
 # @@protoc_insertion_point(module_scope)
