Skip to content

Commit ecfda34

Browse files
Address Eduard's feedback: Add unit tests and enable tests across Spark versions
- Add unit tests in api module for string-to-binary and string-to-fixed conversions - Test valid hex string conversions (uppercase and lowercase) - Test invalid hex strings return null - Test fixed type with wrong length returns null - Remove @Disabled testBinaryInFilter from Spark v3.4, v3.5, and v4.0 - The fix in Literals.java now properly handles binary/fixed types - Tests should work across all Spark versions
1 parent 8302bac commit ecfda34

4 files changed

Lines changed: 56 additions & 27 deletions

File tree

api/src/test/java/org/apache/iceberg/expressions/TestMiscLiteralConversions.java

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,62 @@ public void testFixedToBinary() {
107107
.isEqualTo(lit.value().duplicate());
108108
}
109109

110+
@Test
111+
public void testStringToFixed() {
112+
// Test valid hex string to fixed conversion
113+
Literal<CharSequence> hexString = Literal.of("000102");
114+
Literal<ByteBuffer> fixedLit = hexString.to(Types.FixedType.ofLength(3));
115+
assertThat(fixedLit).as("Should convert valid hex string to fixed").isNotNull();
116+
assertThat(fixedLit.value().array())
117+
.as("Should decode hex string correctly")
118+
.isEqualTo(new byte[] {0, 1, 2});
119+
120+
// Test lowercase hex string
121+
Literal<CharSequence> lowercaseHex = Literal.of("0a0b0c");
122+
Literal<ByteBuffer> lowercaseFixed = lowercaseHex.to(Types.FixedType.ofLength(3));
123+
assertThat(lowercaseFixed).as("Should convert lowercase hex string to fixed").isNotNull();
124+
assertThat(lowercaseFixed.value().array())
125+
.as("Should decode lowercase hex string correctly")
126+
.isEqualTo(new byte[] {10, 11, 12});
127+
128+
// Test wrong length returns null
129+
Literal<CharSequence> wrongLength = Literal.of("0001");
130+
assertThat(wrongLength.to(Types.FixedType.ofLength(3)))
131+
.as("Should return null for wrong length")
132+
.isNull();
133+
134+
// Test invalid hex string returns null
135+
Literal<CharSequence> invalidHex = Literal.of("GGHHII");
136+
assertThat(invalidHex.to(Types.FixedType.ofLength(3)))
137+
.as("Should return null for invalid hex string")
138+
.isNull();
139+
}
140+
141+
@Test
142+
public void testStringToBinary() {
143+
// Test valid hex string to binary conversion
144+
Literal<CharSequence> hexString = Literal.of("000102");
145+
Literal<ByteBuffer> binaryLit = hexString.to(Types.BinaryType.get());
146+
assertThat(binaryLit).as("Should convert valid hex string to binary").isNotNull();
147+
assertThat(binaryLit.value().array())
148+
.as("Should decode hex string correctly")
149+
.isEqualTo(new byte[] {0, 1, 2});
150+
151+
// Test lowercase hex string
152+
Literal<CharSequence> lowercaseHex = Literal.of("0a0b0c");
153+
Literal<ByteBuffer> lowercaseBinary = lowercaseHex.to(Types.BinaryType.get());
154+
assertThat(lowercaseBinary).as("Should convert lowercase hex string to binary").isNotNull();
155+
assertThat(lowercaseBinary.value().array())
156+
.as("Should decode lowercase hex string correctly")
157+
.isEqualTo(new byte[] {10, 11, 12});
158+
159+
// Test invalid hex string returns null
160+
Literal<CharSequence> invalidHex = Literal.of("GGHHII");
161+
assertThat(invalidHex.to(Types.BinaryType.get()))
162+
.as("Should return null for invalid hex string")
163+
.isNull();
164+
}
165+
110166
@Test
111167
public void testInvalidBooleanConversions() {
112168
testInvalidConversions(

spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoteScanPlanning.java

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,6 @@
2525
import org.apache.iceberg.rest.RESTCatalogProperties;
2626
import org.apache.iceberg.spark.SparkCatalogConfig;
2727
import org.apache.iceberg.spark.sql.TestSelect;
28-
import org.junit.jupiter.api.Disabled;
29-
import org.junit.jupiter.api.TestTemplate;
3028
import org.junit.jupiter.api.extension.ExtendWith;
3129

3230
@ExtendWith(ParameterizedTestExtension.class)
@@ -48,11 +46,4 @@ protected static Object[][] parameters() {
4846
}
4947
};
5048
}
51-
52-
@TestTemplate
53-
@Disabled(
54-
"binary filter that is used by Spark is not working because ExpressionParser.fromJSON doesn't have the Schema to properly parse the filter expression")
55-
public void testBinaryInFilter() {
56-
super.testBinaryInFilter();
57-
}
5849
}

spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoteScanPlanning.java

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,6 @@
2525
import org.apache.iceberg.rest.RESTCatalogProperties;
2626
import org.apache.iceberg.spark.SparkCatalogConfig;
2727
import org.apache.iceberg.spark.sql.TestSelect;
28-
import org.junit.jupiter.api.Disabled;
29-
import org.junit.jupiter.api.TestTemplate;
3028
import org.junit.jupiter.api.extension.ExtendWith;
3129

3230
@ExtendWith(ParameterizedTestExtension.class)
@@ -48,11 +46,4 @@ protected static Object[][] parameters() {
4846
}
4947
};
5048
}
51-
52-
@TestTemplate
53-
@Disabled(
54-
"binary filter that is used by Spark is not working because ExpressionParser.fromJSON doesn't have the Schema to properly parse the filter expression")
55-
public void testBinaryInFilter() {
56-
super.testBinaryInFilter();
57-
}
5849
}

spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRemoteScanPlanning.java

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,6 @@
2525
import org.apache.iceberg.rest.RESTCatalogProperties;
2626
import org.apache.iceberg.spark.SparkCatalogConfig;
2727
import org.apache.iceberg.spark.sql.TestSelect;
28-
import org.junit.jupiter.api.Disabled;
29-
import org.junit.jupiter.api.TestTemplate;
3028
import org.junit.jupiter.api.extension.ExtendWith;
3129

3230
@ExtendWith(ParameterizedTestExtension.class)
@@ -48,11 +46,4 @@ protected static Object[][] parameters() {
4846
}
4947
};
5048
}
51-
52-
@TestTemplate
53-
@Disabled(
54-
"binary filter that is used by Spark is not working because ExpressionParser.fromJSON doesn't have the Schema to properly parse the filter expression")
55-
public void testBinaryInFilter() {
56-
super.testBinaryInFilter();
57-
}
5849
}

0 commit comments

Comments
 (0)