Skip to content

Commit a2b831d

Browse files
committed
Add more tests
1 parent 5378d32 commit a2b831d

File tree

4 files changed

+86
-4
lines changed

4 files changed

+86
-4
lines changed

Cargo.lock

Lines changed: 1 addition & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/integration_tests/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,3 +37,4 @@ iceberg_test_utils = { path = "../test_utils", features = ["tests"] }
3737
parquet = { workspace = true }
3838
tokio = { workspace = true }
3939
uuid = { workspace = true }
40+
ordered-float = "2.10.1"

crates/integration_tests/testdata/spark/provision.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -114,12 +114,19 @@
114114
spark.sql("ALTER TABLE rest.default.test_rename_column RENAME COLUMN lang TO language")
115115
spark.sql("INSERT INTO rest.default.test_rename_column VALUES ('Java')")
116116

117-
# Create a table, and do some evolution on a partition column
118-
spark.sql("CREATE OR REPLACE TABLE rest.default.test_promote_column (foo int) USING iceberg PARTITIONED BY (foo)")
117+
# Create a table, and do some evolution
118+
spark.sql("CREATE OR REPLACE TABLE rest.default.test_promote_column (foo int) USING iceberg")
119119
spark.sql("INSERT INTO rest.default.test_promote_column VALUES (19)")
120120
spark.sql("ALTER TABLE rest.default.test_promote_column ALTER COLUMN foo TYPE bigint")
121121
spark.sql("INSERT INTO rest.default.test_promote_column VALUES (25)")
122-
spark.sql("INSERT INTO rest.default.test_promote_column VALUES (null)")
122+
123+
# Create a table, and promote the types of several columns (int -> bigint, float -> double, decimal(4,2) -> decimal(6,2)); NOTE(review): despite the table name, no PARTITIONED BY clause is present — confirm whether partitioning was intended
124+
spark.sql("CREATE OR REPLACE TABLE rest.default.test_promote_partition_column (foo int, bar float, baz decimal(4, 2)) USING iceberg")
125+
spark.sql("INSERT INTO rest.default.test_promote_partition_column VALUES (19, 19.25, 19.25)")
126+
spark.sql("ALTER TABLE rest.default.test_promote_partition_column ALTER COLUMN foo TYPE bigint")
127+
spark.sql("ALTER TABLE rest.default.test_promote_partition_column ALTER COLUMN bar TYPE double")
128+
spark.sql("ALTER TABLE rest.default.test_promote_partition_column ALTER COLUMN baz TYPE decimal(6, 2)")
129+
spark.sql("INSERT INTO rest.default.test_promote_partition_column VALUES (25, 22.25, 22.25)")
123130

124131
# Create a table with various types
125132
spark.sql("""

crates/integration_tests/tests/shared_tests/read_evolved_schema.rs

Lines changed: 74 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,12 +17,13 @@
1717

1818
//! Integration tests for rest catalog.
1919
20-
use arrow_array::{Int64Array, StringArray};
20+
use arrow_array::{Decimal128Array, Float64Array, Int64Array, StringArray};
2121
use futures::TryStreamExt;
2222
use iceberg::expr::Reference;
2323
use iceberg::spec::Datum;
2424
use iceberg::{Catalog, TableIdent};
2525
use iceberg_catalog_rest::RestCatalog;
26+
use ordered_float::OrderedFloat;
2627

2728
use crate::get_shared_containers;
2829

@@ -98,4 +99,76 @@ async fn test_evolved_schema() {
9899
actual.sort();
99100

100101
assert_eq!(actual, vec![19, 25]);
102+
103+
// Read back the table whose column types were promoted (test_promote_partition_column) and verify values survive the int->bigint, float->double, and decimal-widening promotions
104+
let table = rest_catalog
105+
.load_table(&TableIdent::from_strs(["default", "test_promote_partition_column"]).unwrap())
106+
.await
107+
.unwrap();
108+
let scan = table.scan().build();
109+
let batch_stream = scan.unwrap().to_arrow().await.unwrap();
110+
111+
let batches: Vec<_> = batch_stream.try_collect().await.unwrap();
112+
let mut actual_foo = vec![
113+
batches[0]
114+
.column_by_name("foo")
115+
.unwrap()
116+
.as_any()
117+
.downcast_ref::<Int64Array>()
118+
.unwrap()
119+
.value(0),
120+
batches[1]
121+
.column_by_name("foo")
122+
.unwrap()
123+
.as_any()
124+
.downcast_ref::<Int64Array>()
125+
.unwrap()
126+
.value(0),
127+
];
128+
129+
actual_foo.sort();
130+
131+
assert_eq!(actual_foo, vec![19, 25]);
132+
133+
let mut actual_bar = vec![
134+
OrderedFloat( batches[0]
135+
.column_by_name("bar")
136+
.unwrap()
137+
.as_any()
138+
.downcast_ref::<Float64Array>()
139+
.unwrap()
140+
.value(0)),
141+
OrderedFloat( batches[1]
142+
.column_by_name("bar")
143+
.unwrap()
144+
.as_any()
145+
.downcast_ref::<Float64Array>()
146+
.unwrap()
147+
.value(0)),
148+
];
149+
150+
actual_bar.sort();
151+
152+
assert_eq!(actual_bar, vec![19.25, 22.25]);
153+
154+
let mut actual_baz = vec![
155+
batches[0]
156+
.column_by_name("baz")
157+
.unwrap()
158+
.as_any()
159+
.downcast_ref::<Decimal128Array>()
160+
.unwrap()
161+
.value(0),
162+
batches[1]
163+
.column_by_name("baz")
164+
.unwrap()
165+
.as_any()
166+
.downcast_ref::<Decimal128Array>()
167+
.unwrap()
168+
.value(0),
169+
];
170+
171+
actual_baz.sort();
172+
173+
assert_eq!(actual_baz, vec![1925, 2225]);
101174
}

0 commit comments

Comments (0)