Skip to content

Commit

Permalink
[SPARK-38721][SQL][TESTS] Test the error class: CANNOT_PARSE_DECIMAL
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?
This PR adds a test for the error class `CANNOT_PARSE_DECIMAL` to `QueryExecutionErrorsSuite`; it is a follow-up of [SPARK-37935](https://issues.apache.org/jira/browse/SPARK-37935).

### Why are the changes needed?
Add one test for the error class CANNOT_PARSE_DECIMAL to QueryExecutionErrorsSuite.

### Does this PR introduce any user-facing change?
No

### How was this patch tested?
- Manual test:

```
build/sbt "sql/testOnly *QueryExecutionErrorsSuite*"
```

All tests passed.

Closes apache#36192 from panbingkun/SPARK-38721.

Authored-by: panbingkun <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
  • Loading branch information
panbingkun authored and MaxGekk committed Apr 14, 2022
1 parent 04586a9 commit 22cd0c2
Showing 1 changed file with 48 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,19 @@

package org.apache.spark.sql.errors

import org.apache.spark.{SparkArithmeticException, SparkException, SparkRuntimeException, SparkUnsupportedOperationException, SparkUpgradeException}
import java.util.Locale

import org.apache.spark.{SparkArithmeticException, SparkException, SparkIllegalStateException, SparkRuntimeException, SparkUnsupportedOperationException, SparkUpgradeException}
import org.apache.spark.sql.{DataFrame, QueryTest}
import org.apache.spark.sql.catalyst.util.BadRecordException
import org.apache.spark.sql.execution.QueryExecutionException
import org.apache.spark.sql.execution.datasources.orc.OrcTest
import org.apache.spark.sql.execution.datasources.parquet.ParquetTest
import org.apache.spark.sql.functions.{lit, lower, struct, sum}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.EXCEPTION
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{StructType, TimestampType}
import org.apache.spark.sql.types.{DecimalType, StructType, TimestampType}
import org.apache.spark.sql.util.ArrowUtils

class QueryExecutionErrorsSuite extends QueryTest
Expand Down Expand Up @@ -278,4 +282,46 @@ class QueryExecutionErrorsSuite extends QueryTest
assert(e.getMessage ===
"Datetime operation overflow: add 1000000 YEAR to TIMESTAMP '2022-03-09 01:02:03'.")
}

test("CANNOT_PARSE_DECIMAL: unparseable decimal") {
  // Write a tiny CSV file whose single data row ("$92,807.99") cannot be
  // parsed as a decimal, then read it back with a DecimalType schema in
  // FAILFAST mode and verify the full exception-cause chain ends in a
  // SparkIllegalStateException carrying the CANNOT_PARSE_DECIMAL error class.
  val topLevel = intercept[SparkException] {
    withTempPath { path =>

      // Header line plus one unparseable money value, written as raw text
      // so the quoting is preserved exactly as CSV input.
      val rawLines = Seq(
        "money",
        "\"$92,807.99\""
      ).toDF()

      rawLines.coalesce(1).write.text(path.getAbsolutePath)

      val moneySchema = new StructType().add("money", DecimalType.DoubleDecimal)
      val reader = spark
        .read
        .schema(moneySchema)
        .format("csv")
        .option("header", "true")
        .option("locale", Locale.ROOT.toLanguageTag)
        .option("multiLine", "true")
        .option("inferSchema", "false")
        .option("mode", "FAILFAST")
      reader.load(path.getAbsolutePath).select($"money").collect()
    }
  }

  // Unwrap the cause chain one level at a time, asserting the concrete
  // exception type at each step before casting.
  assert(topLevel.getCause.isInstanceOf[QueryExecutionException])
  val queryExecErr = topLevel.getCause.asInstanceOf[QueryExecutionException]

  assert(queryExecErr.getCause.isInstanceOf[SparkException])
  val taskErr = queryExecErr.getCause.asInstanceOf[SparkException]

  assert(taskErr.getCause.isInstanceOf[BadRecordException])
  val badRecordErr = taskErr.getCause.asInstanceOf[BadRecordException]

  assert(badRecordErr.getCause.isInstanceOf[SparkIllegalStateException])
  val rootErr = badRecordErr.getCause.asInstanceOf[SparkIllegalStateException]

  // The root cause must expose the expected error class, SQLSTATE and message.
  assert(rootErr.getErrorClass === "CANNOT_PARSE_DECIMAL")
  assert(rootErr.getSqlState === "42000")
  assert(rootErr.getMessage === "Cannot parse decimal")
}
}

0 comments on commit 22cd0c2

Please sign in to comment.