Skip to content

Commit 3ef0362

Browse files
committed
[SPARK-55714][SQL] JDK might throw ArithmeticException without message
### What changes were proposed in this pull request? Canonicalize the error message of `SparkArithmeticException`: - null => "overflow" - "byte overflow", "long overflow", etc., to "overflow". - leave others as-is. Update tests to accept null message from `ArithmeticException` ### Why are the changes needed? During test, I found JDK 25 might throw `ArithmeticException` with `null` message, and it varies across different platforms / CPU models, for example, it's different on Apple M1 and x86 Server, and even differs between Intel Skylake and AMD Zen4 (both are x86 but with different instruction set support) https://bugs.openjdk.org/browse/JDK-8367990 > This is expected behavior. For "hot throws", the JIT will produce compiled code that does not go through the interpreter via deoptimization to throw an exception but throws a pre-allocated exception object without a stack trace or message. ... this optimization can be disabled with -XX:-OmitStackTraceInFastThrow. In other words, the error message is not something in the API contract. ### Does this PR introduce _any_ user-facing change? Some `ARITHMETIC_OVERFLOW` error condition message changes from "byte overflow", "long overflow", etc., to "overflow". ### How was this patch tested? Passes GHA for JDK 17. JDK 25 will be covered by daily tests. ### Was this patch authored or co-authored using generative AI tooling? No, the patch is crafted by hand. Closes #54514 from pan3793/SPARK-55714. Authored-by: Cheng Pan <chengpan@apache.org> Signed-off-by: Cheng Pan <chengpan@apache.org>
1 parent 7c9456f commit 3ef0362

File tree

17 files changed

+61
-52
lines changed

17 files changed

+61
-52
lines changed

sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -119,13 +119,22 @@ private[sql] trait ExecutionErrors extends DataTypeErrorsBase {
119119
message: String,
120120
suggestedFunc: String = "",
121121
context: QueryContext = null): ArithmeticException = {
122+
val canonicalMessage = message match {
123+
// For "hot throws", the JIT will produce compiled code that does not
124+
// go through the interpreter via deoptimization to throw an exception
125+
// but throws a pre-allocated exception object without a stack trace
126+
// or message. See https://bugs.openjdk.org/browse/JDK-8367990
127+
case null => "overflow"
128+
case m if m.contains("overflow") => "overflow"
129+
case m => m
130+
}
122131
val alternative = if (suggestedFunc.nonEmpty) {
123132
s" Use '$suggestedFunc' to tolerate overflow and return NULL instead."
124133
} else ""
125134
new SparkArithmeticException(
126135
errorClass = "ARITHMETIC_OVERFLOW",
127136
messageParameters = Map(
128-
"message" -> message,
137+
"message" -> canonicalMessage,
129138
"alternative" -> alternative,
130139
"config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)),
131140
context = getQueryContext(context),

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -302,7 +302,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
302302
val errMsg = intercept[ArithmeticException] {
303303
IntervalUtils.durationToMicros(Duration.ofSeconds(Long.MaxValue, Long.MaxValue))
304304
}.getMessage
305-
assert(errMsg.contains("long overflow"))
305+
assert(errMsg == null || errMsg.contains("overflow"))
306306
}
307307

308308
test("SPARK-35726: Truncate java.time.Duration by fields of day-time interval type") {
@@ -357,7 +357,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
357357
val errMsg = intercept[ArithmeticException] {
358358
IntervalUtils.periodToMonths(Period.of(Int.MaxValue, Int.MaxValue, Int.MaxValue))
359359
}.getMessage
360-
assert(errMsg.contains("integer overflow"))
360+
assert(errMsg == null || errMsg.contains("overflow"))
361361
}
362362

363363
test("SPARK-35769: Truncate java.time.Period by fields of year-month interval type") {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -119,19 +119,19 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
119119
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
120120
checkErrorInExpression[SparkArithmeticException](
121121
UnaryMinus(Literal(Long.MinValue)), "ARITHMETIC_OVERFLOW",
122-
Map("message" -> "long overflow", "alternative" -> "",
122+
Map("message" -> "overflow", "alternative" -> "",
123123
"config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
124124
checkErrorInExpression[SparkArithmeticException](
125125
UnaryMinus(Literal(Int.MinValue)), "ARITHMETIC_OVERFLOW",
126-
Map("message" -> "integer overflow", "alternative" -> "",
126+
Map("message" -> "overflow", "alternative" -> "",
127127
"config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
128128
checkErrorInExpression[SparkArithmeticException](
129129
UnaryMinus(Literal(Short.MinValue)), "ARITHMETIC_OVERFLOW",
130-
Map("message" -> "short overflow", "alternative" -> "",
130+
Map("message" -> "overflow", "alternative" -> "",
131131
"config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
132132
checkErrorInExpression[SparkArithmeticException](
133133
UnaryMinus(Literal(Byte.MinValue)), "ARITHMETIC_OVERFLOW",
134-
Map("message" -> "byte overflow", "alternative" -> "",
134+
Map("message" -> "overflow", "alternative" -> "",
135135
"config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY)))
136136
checkEvaluation(UnaryMinus(positiveShortLit), (- positiveShort).toShort)
137137
checkEvaluation(UnaryMinus(negativeShortLit), (- negativeShort).toShort)

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1307,7 +1307,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
13071307
timeZoneId = Some(tz)),
13081308
Duration.ZERO)
13091309
}.getMessage
1310-
assert(errMsg.contains("overflow"))
1310+
assert(errMsg == null || errMsg.contains("overflow"))
13111311

13121312
Seq(false, true).foreach { legacy =>
13131313
checkConsistencyBetweenInterpretedAndCodegen(
@@ -1372,7 +1372,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
13721372
timeZoneId = Some(tz)),
13731373
Duration.ZERO)
13741374
}.getMessage
1375-
assert(errMsg.contains("overflow"))
1375+
assert(errMsg == null || errMsg.contains("overflow"))
13761376

13771377
Seq(false, true).foreach { legacy =>
13781378
checkConsistencyBetweenInterpretedAndCodegen(
@@ -1822,7 +1822,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
18221822
null)
18231823
}.getCause
18241824
assert(e.isInstanceOf[ArithmeticException])
1825-
assert(e.getMessage.contains("long overflow"))
1825+
assert(e.getMessage == null || e.getMessage.contains("overflow"))
18261826

18271827
checkEvaluation(
18281828
TimestampAddInterval(

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ class IntervalExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
155155
check("-100 years -1 millisecond", 0.5, "-50 years -500 microseconds")
156156
check("2 months 4 seconds", -0.5, "-1 months -2 seconds")
157157
check("1 month 2 microseconds", 1.5, "1 months 3 microseconds")
158-
check("2 months", Int.MaxValue, "integer overflow", Some(true))
158+
check("2 months", Int.MaxValue, "overflow", Some(true))
159159
check("2 months", Int.MaxValue, s"${Int.MaxValue} months", Some(false))
160160
}
161161

@@ -188,7 +188,7 @@ class IntervalExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
188188
check("1 month 3 microsecond", 1.5, "2 microseconds")
189189
check("1 second", 0, "Division by zero", Some(true))
190190
check("1 second", 0, null, Some(false))
191-
check(s"${Int.MaxValue} months", 0.9, "integer overflow", Some(true))
191+
check(s"${Int.MaxValue} months", 0.9, "overflow", Some(true))
192192
check(s"${Int.MaxValue} months", 0.9, s"${Int.MaxValue} months", Some(false))
193193
}
194194

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -832,7 +832,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
832832
val msg = intercept[ArithmeticException] {
833833
DateTimeUtils.localDateTimeToMicros(dt)
834834
}.getMessage
835-
assert(msg == "long overflow")
835+
assert(msg == null || msg.contains("overflow"))
836836
}
837837
}
838838

@@ -1445,7 +1445,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
14451445
},
14461446
condition = "ARITHMETIC_OVERFLOW",
14471447
parameters = Map(
1448-
"message" -> "long overflow",
1448+
"message" -> "overflow",
14491449
"alternative" -> "",
14501450
"config" -> toSQLConf(SqlApiConf.ANSI_ENABLED_KEY))
14511451
)

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -481,7 +481,7 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
481481
assert(divide(interval, 0.9) === new CalendarInterval(Int.MaxValue, Int.MaxValue,
482482
((Int.MaxValue / 9.0) * MICROS_PER_DAY).round))
483483
val e1 = intercept[ArithmeticException](divideExact(interval, 0.9))
484-
assert(e1.getMessage.contains("integer overflow"))
484+
assert(e1.getMessage.contains("overflow"))
485485

486486
interval = new CalendarInterval(123, 456, 789)
487487
assert(divide(interval, 0) === null)

sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,7 @@ org.apache.spark.SparkArithmeticException
203203
"messageParameters" : {
204204
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
205205
"config" : "\"spark.sql.ansi.enabled\"",
206-
"message" : "integer overflow"
206+
"message" : "overflow"
207207
},
208208
"queryContext" : [ {
209209
"objectType" : "",
@@ -238,7 +238,7 @@ org.apache.spark.SparkArithmeticException
238238
"messageParameters" : {
239239
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
240240
"config" : "\"spark.sql.ansi.enabled\"",
241-
"message" : "integer overflow"
241+
"message" : "overflow"
242242
},
243243
"queryContext" : [ {
244244
"objectType" : "",
@@ -273,7 +273,7 @@ org.apache.spark.SparkArithmeticException
273273
"messageParameters" : {
274274
"alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
275275
"config" : "\"spark.sql.ansi.enabled\"",
276-
"message" : "integer overflow"
276+
"message" : "overflow"
277277
},
278278
"queryContext" : [ {
279279
"objectType" : "",
@@ -309,7 +309,7 @@ org.apache.spark.SparkArithmeticException
309309
"messageParameters" : {
310310
"alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
311311
"config" : "\"spark.sql.ansi.enabled\"",
312-
"message" : "integer overflow"
312+
"message" : "overflow"
313313
},
314314
"queryContext" : [ {
315315
"objectType" : "",
@@ -345,7 +345,7 @@ org.apache.spark.SparkArithmeticException
345345
"messageParameters" : {
346346
"alternative" : " Use 'try_subtract' to tolerate overflow and return NULL instead.",
347347
"config" : "\"spark.sql.ansi.enabled\"",
348-
"message" : "integer overflow"
348+
"message" : "overflow"
349349
},
350350
"queryContext" : [ {
351351
"objectType" : "",
@@ -381,7 +381,7 @@ org.apache.spark.SparkArithmeticException
381381
"messageParameters" : {
382382
"alternative" : " Use 'try_subtract' to tolerate overflow and return NULL instead.",
383383
"config" : "\"spark.sql.ansi.enabled\"",
384-
"message" : "integer overflow"
384+
"message" : "overflow"
385385
},
386386
"queryContext" : [ {
387387
"objectType" : "",

sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -395,7 +395,7 @@ org.apache.spark.SparkArithmeticException
395395
"messageParameters" : {
396396
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
397397
"config" : "\"spark.sql.ansi.enabled\"",
398-
"message" : "long overflow"
398+
"message" : "overflow"
399399
},
400400
"queryContext" : [ {
401401
"objectType" : "",
@@ -918,7 +918,7 @@ org.apache.spark.SparkArithmeticException
918918
"messageParameters" : {
919919
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
920920
"config" : "\"spark.sql.ansi.enabled\"",
921-
"message" : "long overflow"
921+
"message" : "overflow"
922922
},
923923
"queryContext" : [ {
924924
"objectType" : "",
@@ -958,7 +958,7 @@ org.apache.spark.SparkArithmeticException
958958
"messageParameters" : {
959959
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
960960
"config" : "\"spark.sql.ansi.enabled\"",
961-
"message" : "long overflow"
961+
"message" : "overflow"
962962
},
963963
"queryContext" : [ {
964964
"objectType" : "",
@@ -998,7 +998,7 @@ org.apache.spark.SparkArithmeticException
998998
"messageParameters" : {
999999
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
10001000
"config" : "\"spark.sql.ansi.enabled\"",
1001-
"message" : "long overflow"
1001+
"message" : "overflow"
10021002
},
10031003
"queryContext" : [ {
10041004
"objectType" : "",

sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out.java21

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -395,7 +395,7 @@ org.apache.spark.SparkArithmeticException
395395
"messageParameters" : {
396396
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
397397
"config" : "\"spark.sql.ansi.enabled\"",
398-
"message" : "long overflow"
398+
"message" : "overflow"
399399
},
400400
"queryContext" : [ {
401401
"objectType" : "",
@@ -918,7 +918,7 @@ org.apache.spark.SparkArithmeticException
918918
"messageParameters" : {
919919
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
920920
"config" : "\"spark.sql.ansi.enabled\"",
921-
"message" : "long overflow"
921+
"message" : "overflow"
922922
},
923923
"queryContext" : [ {
924924
"objectType" : "",
@@ -958,7 +958,7 @@ org.apache.spark.SparkArithmeticException
958958
"messageParameters" : {
959959
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
960960
"config" : "\"spark.sql.ansi.enabled\"",
961-
"message" : "long overflow"
961+
"message" : "overflow"
962962
},
963963
"queryContext" : [ {
964964
"objectType" : "",
@@ -998,7 +998,7 @@ org.apache.spark.SparkArithmeticException
998998
"messageParameters" : {
999999
"alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
10001000
"config" : "\"spark.sql.ansi.enabled\"",
1001-
"message" : "long overflow"
1001+
"message" : "overflow"
10021002
},
10031003
"queryContext" : [ {
10041004
"objectType" : "",

0 commit comments

Comments
 (0)