@@ -1154,6 +1154,30 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
11541154 }
11551155 }
11561156
// Exhaustively tests casting arrays between every pair of supported element
// types. A pair is exercised only when all of the following hold:
//   - the element types differ,
//   - the corresponding scalar cast test is not tagged with
//     org.scalatest.Ignore (so known-broken casts are skipped consistently),
//   - Spark itself permits the cast (Cast.canCast), and
//   - Comet reports the cast as fully Compatible in LEGACY eval mode.
test("cast ArrayType to ArrayType") {
  val types = Seq(
    BooleanType,
    StringType,
    ByteType,
    IntegerType,
    LongType,
    ShortType,
    DecimalType(10, 2),
    DecimalType(38, 18))
  for (fromType <- types) {
    for (toType <- types) {
      // NOTE: the tag key must exactly match the scalar test's name
      // ("cast <from> to <to>") for the Ignore lookup to work.
      if (fromType != toType &&
        !tags
          .get(s"cast $fromType to $toType")
          .exists(s => s.contains("org.scalatest.Ignore")) &&
        Cast.canCast(fromType, toType) &&
        CometCast.isSupported(fromType, toType, None, CometEvalMode.LEGACY) == Compatible()) {
        castTest(generateArrays(100, fromType), ArrayType(toType))
      }
    }
  }
}
1180+
// Generates `dataSize` random float rows (with nulls mixed in by `withNulls`)
// as a single-column DataFrame named "a".
private def generateFloats(): DataFrame = {
  withNulls(gen.generateFloats(dataSize)).toDF("a")
}
@@ -1182,10 +1206,10 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
11821206 withNulls(gen.generateLongs(dataSize)).toDF(" a" )
11831207 }
11841208
// Generates a DataFrame of `rowNum` rows with a single nullable column "a",
// where each value is an array whose elements have the given `elementType`.
// Row data comes from the suite's random generator (`gen`).
private def generateArrays(rowNum: Int, elementType: DataType): DataFrame = {
  import scala.collection.JavaConverters._
  val schema = StructType(Seq(StructField("a", ArrayType(elementType), nullable = true)))
  spark.createDataFrame(gen.generateRows(rowNum, schema).asJava, schema)
}
11901214
11911215 // https://github.com/apache/datafusion-comet/issues/2038
0 commit comments