@@ -36,4 +36,46 @@ but we need:
 ...
 ```

-more info and cn version to see: https://baifachuan.com/posts/2e6dc139.html
+The read example is in `com.fcbai.parquet.sdk.CustomParquetReaderTest`:
+``` java
+@Test
+public void test_read_complex_map_partition_table_content() throws IOException {
+    ParquetReader<CustomSimpleRecord> reader = ParquetReader
+            .builder(new CustomSimpleReadSupport(), new Path(complexMapFilepath))
+            .withFilter(FilterCompat.get(page(1, 10)))
+            .build();
+    for (CustomSimpleRecord value = reader.read(); value != null; value = reader.read()) {
+        System.out.println(value.toJson());
+    }
+    reader.close();
+}
+```
+
+The write example:
+
+``` java
+@Test
+public void test_write_to_parquet_by_java() throws IOException {
+    List<User> users = new ArrayList<>();
+    User user1 = new User("1", "fcbai", "123123");
+    User user2 = new User("2", "fcbai", "123445");
+    users.add(user1);
+    users.add(user2);
+    Path dataFile = new Path("./src/test/resources/demo.snappy.parquet");
+    // Write as a Parquet file. SNAPPY and OVERWRITE are static imports of
+    // CompressionCodecName.SNAPPY and ParquetFileWriter.Mode.OVERWRITE.
+    try (ParquetWriter<User> writer = AvroParquetWriter.<User>builder(dataFile)
+            .withSchema(ReflectData.AllowNull.get().getSchema(User.class))
+            .withDataModel(ReflectData.get())
+            .withConf(new Configuration())
+            .withCompressionCodec(SNAPPY)
+            .withWriteMode(OVERWRITE)
+            .build()) {
+        for (User user : users) {
+            writer.write(user);
+        }
+    }
+}
+```
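+
+The write example assumes a plain `User` POJO on the classpath so that `ReflectData` can derive an Avro schema from it. The field names below (`id`, `name`, `password`) are a guess from the constructor arguments, not taken from the repository; a minimal sketch might look like this:
+
+``` java
+// Hypothetical POJO matching new User("1", "fcbai", "123123") in the write example.
+// Field names are assumptions; ReflectData uses them as the Avro record field names.
+public class User {
+    private String id;
+    private String name;
+    private String password;
+
+    public User() {}  // no-arg constructor helps when reading back via ReflectData
+
+    public User(String id, String name, String password) {
+        this.id = id;
+        this.name = name;
+        this.password = password;
+    }
+}
+```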
+
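+To check the generated `demo.snappy.parquet`, the file can also be read back with the stock parquet-avro reader instead of the custom read support. This is only a sketch using the standard `AvroParquetReader` API together with the hypothetical `User` POJO above, not part of this SDK:
+
+``` java
+@Test
+public void test_read_back_written_parquet() throws IOException {
+    Path dataFile = new Path("./src/test/resources/demo.snappy.parquet");
+    // Read the records back as User objects via ReflectData.
+    try (ParquetReader<User> reader = AvroParquetReader.<User>builder(dataFile)
+            .withDataModel(ReflectData.get())
+            .withConf(new Configuration())
+            .build()) {
+        for (User user = reader.read(); user != null; user = reader.read()) {
+            System.out.println(user);
+        }
+    }
+}
+```
+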
+For more information and a Chinese version, see: https://baifachuan.com/posts/2e6dc139.html