From f7ac35182b683c15db994419564546b56b2f78ad Mon Sep 17 00:00:00 2001
From: Xuanwo
Date: Fri, 3 Jan 2025 12:16:25 +0000
Subject: [PATCH] deploy: 1c632b869a0b9076f49df3b266c7d9fe1a648125

---
 api/help.html                                    |   2 +-
 .../arrow/fn.arrow_schema_to_schema.html         |   2 +-
 api/iceberg/arrow/fn.arrow_type_to_type.html     |   2 +-
 .../arrow/fn.schema_to_arrow_schema.html         |   2 +-
 api/iceberg/arrow/fn.type_to_arrow_type.html     |   2 +-
 .../arrow/trait.ArrowSchemaVisitor.html          |  32 +-
 api/iceberg/spec/struct.Schema.html              |   2 +-
 api/settings.html                                |   2 +-
 api/src/iceberg/arrow/schema.rs.html             |  16 +-
 .../writer/file_writer/parquet_writer.rs.html    | 490 +++++++++++++++++-
 10 files changed, 527 insertions(+), 25 deletions(-)

diff --git a/api/help.html b/api/help.html
index f79e7b68c..b3855f23b 100644
--- a/api/help.html
+++ b/api/help.html
@@ -1 +1 @@
-Help [single minified line: rustdoc help page ("Rustdoc help", "Back"); markup lost in extraction]
\ No newline at end of file
+Help [regenerated copy; visible text unchanged]
\ No newline at end of file
diff --git a/api/iceberg/arrow/fn.arrow_schema_to_schema.html b/api/iceberg/arrow/fn.arrow_schema_to_schema.html
index 77a2f0b8f..7ab16c264 100644
--- a/api/iceberg/arrow/fn.arrow_schema_to_schema.html
+++ b/api/iceberg/arrow/fn.arrow_schema_to_schema.html
@@ -1,2 +1,2 @@
-[minified page for `pub fn arrow_schema_to_schema(schema: &Schema) -> Result<Schema>`: "Convert Arrow schema to Iceberg schema."]
+[regenerated copy; visible text unchanged]
\ No newline at end of file
diff --git a/api/iceberg/arrow/fn.arrow_type_to_type.html b/api/iceberg/arrow/fn.arrow_type_to_type.html
index 726c3577a..46ab6db54 100644
--- a/api/iceberg/arrow/fn.arrow_type_to_type.html
+++ b/api/iceberg/arrow/fn.arrow_type_to_type.html
@@ -1,2 +1,2 @@
-[minified page for `pub fn arrow_type_to_type(ty: &DataType) -> Result<Type>`: "Convert Arrow type to iceberg type."]
+[regenerated copy; visible text unchanged]
\ No newline at end of file
diff --git a/api/iceberg/arrow/fn.schema_to_arrow_schema.html b/api/iceberg/arrow/fn.schema_to_arrow_schema.html
index 108794e98..e0cf63237 100644
--- a/api/iceberg/arrow/fn.schema_to_arrow_schema.html
+++ b/api/iceberg/arrow/fn.schema_to_arrow_schema.html
@@ -1,2 +1,2 @@
-[minified page for `pub fn schema_to_arrow_schema(schema: &Schema) -> Result<Schema>`: "Convert iceberg schema to an arrow schema."]
+[regenerated copy; visible text unchanged]
\ No newline at end of file
diff --git a/api/iceberg/arrow/fn.type_to_arrow_type.html b/api/iceberg/arrow/fn.type_to_arrow_type.html
index 8dc21d3f4..3f6f710ea 100644
--- a/api/iceberg/arrow/fn.type_to_arrow_type.html
+++ b/api/iceberg/arrow/fn.type_to_arrow_type.html
@@ -1,2 +1,2 @@
-[minified page for `pub fn type_to_arrow_type(ty: &Type) -> Result<DataType>`: "Convert iceberg type to an arrow type."]
+[regenerated copy; visible text unchanged]
\ No newline at end of file
diff --git a/api/iceberg/arrow/trait.ArrowSchemaVisitor.html b/api/iceberg/arrow/trait.ArrowSchemaVisitor.html
index 581b17151..80d0c05a1 100644
--- a/api/iceberg/arrow/trait.ArrowSchemaVisitor.html
+++ b/api/iceberg/arrow/trait.ArrowSchemaVisitor.html
@@ -1,4 +1,4 @@
[page header for `pub trait ArrowSchemaVisitor { type T; type U; … }`, a post-order Arrow schema visitor; for the order of method calls, see [visit_schema]]
@@ -33,28 +33,28 @@
[doc body re-marked up; visible text unchanged. Associated types: T (return type of this visitor on an Arrow field), U (return type of this visitor on an Arrow schema). Required methods: schema, struct, list, map, primitive, each called after the corresponding node's children are visited. Provided hooks: before_field/after_field (struct/list/map fields), before_list_element/after_list_element, before_map_key/after_map_key, before_map_value/after_map_value.]
\ No newline at end of file
diff --git a/api/iceberg/spec/struct.Schema.html b/api/iceberg/spec/struct.Schema.html
index 784d5eafa..fc53be5fe 100644
--- a/api/iceberg/spec/struct.Schema.html
+++ b/api/iceberg/spec/struct.Schema.html
@@ -21,7 +21,7 @@
[trait-implementation listing for `Schema`, re-marked up with visible text unchanged: Deserialize, Display, PartialEq, Serialize, two `TryFrom<&Schema>` impls (one converting from an ArrowSchema), Eq, the auto traits (!Freeze, RefUnwindSafe, Send, Sync, Unpin, UnwindSafe), and the blanket impls (Any, Borrow, BorrowMut, Conv, …)]
diff --git a/api/settings.html b/api/settings.html
index dfaf9d286..0b4d7eab8 100644
--- a/api/settings.html
+++ b/api/settings.html
@@ -1 +1 @@
-Settings [single minified line: rustdoc settings page ("Rustdoc settings", "Back")]
\ No newline at end of file
+Settings [regenerated copy; visible text unchanged]
\ No newline at end of file
diff --git a/api/src/iceberg/arrow/schema.rs.html b/api/src/iceberg/arrow/schema.rs.html
index da03d4b87..3ecc9a25e 100644
--- a/api/src/iceberg/arrow/schema.rs.html
+++ b/api/src/iceberg/arrow/schema.rs.html
@@ -1511,6 +1511,13 @@
 [line-number gutter: context 1511-1513, entries +1514 through +1520 added for the seven new source lines]
// Licensed to the Apache Software Foundation (ASF) under one
 // or more contributor license agreements.  See the NOTICE file
 // distributed with this work for additional information
@@ -1542,6 +1549,7 @@
 };
 use arrow_schema::{DataType, Field, Fields, Schema as ArrowSchema, TimeUnit};
 use bitvec::macros::internal::funty::Fundamental;
+use num_bigint::BigInt;
 use parquet::arrow::PARQUET_FIELD_ID_META_KEY;
 use parquet::file::statistics::Statistics;
 use rust_decimal::prelude::ToPrimitive;
@@ -2252,9 +2260,15 @@
                     let Some(bytes) = stats.[<$limit_type _bytes_opt>]() else {
                         return Ok(None);
                     };
+                    let unscaled_value = BigInt::from_signed_bytes_be(bytes);
                     Some(Datum::new(
                         primitive_type.clone(),
-                        PrimitiveLiteral::Int128(i128::from_be_bytes(bytes.try_into()?)),
+                        PrimitiveLiteral::Int128(unscaled_value.to_i128().ok_or_else(|| {
+                            Error::new(
+                                ErrorKind::DataInvalid,
+                                format!("Can't convert bytes to i128: {:?}", bytes),
+                            )
+                        })?),
                     ))
                 }
                 (
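
Note on the hunk above: Parquet stores decimal min/max statistics as big-endian two's-complement byte arrays whose width follows the column's declared precision (for example, 12 bytes for precision 28), so the old `i128::from_be_bytes(bytes.try_into()?)` failed whenever the array was not exactly 16 bytes wide. `BigInt::from_signed_bytes_be` accepts any width and sign-extends before the checked narrowing to i128. A minimal standalone sketch of the decode step, assuming the num-bigint and num-traits crates; `decode_unscaled` is an illustrative helper, not part of the patch:

    use num_bigint::BigInt;
    use num_traits::ToPrimitive;

    // Decode a big-endian two's-complement byte array of any width, as
    // Parquet writes for decimal statistics, into the unscaled i128 value.
    fn decode_unscaled(bytes: &[u8]) -> Option<i128> {
        BigInt::from_signed_bytes_be(bytes).to_i128()
    }

    fn main() {
        assert_eq!(decode_unscaled(&[0xFF]), Some(-1)); // one byte, sign-extended
        assert_eq!(decode_unscaled(&[0x01, 0x00]), Some(256)); // two bytes
        // By contrast, <[u8; 16]>::try_from(bytes) requires exactly 16 bytes,
        // so i128::from_be_bytes cannot handle either input above.
    }
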
diff --git a/api/src/iceberg/writer/file_writer/parquet_writer.rs.html b/api/src/iceberg/writer/file_writer/parquet_writer.rs.html
index 0a51a0a12..e2680d8ca 100644
--- a/api/src/iceberg/writer/file_writer/parquet_writer.rs.html
+++ b/api/src/iceberg/writer/file_writer/parquet_writer.rs.html
@@ -1170,6 +1170,250 @@
 1170
 1171
 1172
+[line-number gutter: entries +1173 through +1416 added for the 244 new source lines]


// Licensed to the Apache Software Foundation (ASF) under one
 // or more contributor license agreements.  See the NOTICE file
 // distributed with this work for additional information
@@ -1650,15 +1894,18 @@
     use anyhow::Result;
     use arrow_array::types::Int64Type;
     use arrow_array::{
-        Array, ArrayRef, BooleanArray, Int32Array, Int64Array, ListArray, RecordBatch, StructArray,
+        Array, ArrayRef, BooleanArray, Decimal128Array, Int32Array, Int64Array, ListArray,
+        RecordBatch, StructArray,
     };
     use arrow_schema::{DataType, SchemaRef as ArrowSchemaRef};
     use arrow_select::concat::concat_batches;
     use parquet::arrow::PARQUET_FIELD_ID_META_KEY;
+    use rust_decimal::Decimal;
     use tempfile::TempDir;
     use uuid::Uuid;
 
     use super::*;
+    use crate::arrow::schema_to_arrow_schema;
     use crate::io::FileIOBuilder;
     use crate::spec::{PrimitiveLiteral, Struct, *};
     use crate::writer::file_writer::location_generator::test::MockLocationGenerator;
@@ -2341,5 +2588,246 @@
 
         Ok(())
     }
+
+    #[tokio::test]
+    async fn test_decimal_bound() -> Result<()> {
+        let temp_dir = TempDir::new().unwrap();
+        let file_io = FileIOBuilder::new_fs_io().build().unwrap();
+        let location_gen =
+            MockLocationGenerator::new(temp_dir.path().to_str().unwrap().to_string());
+        let file_name_gen =
+            DefaultFileNameGenerator::new("test".to_string(), None, DataFileFormat::Parquet);
+
+        // test 1.1 and 2.2
+        let schema = Arc::new(
+            Schema::builder()
+                .with_fields(vec![NestedField::optional(
+                    0,
+                    "decimal",
+                    Type::Primitive(PrimitiveType::Decimal {
+                        precision: 28,
+                        scale: 10,
+                    }),
+                )
+                .into()])
+                .build()
+                .unwrap(),
+        );
+        let arrow_schema: ArrowSchemaRef = Arc::new(schema_to_arrow_schema(&schema).unwrap());
+        let mut pw = ParquetWriterBuilder::new(
+            WriterProperties::builder().build(),
+            schema.clone(),
+            file_io.clone(),
+            location_gen.clone(),
+            file_name_gen.clone(),
+        )
+        .build()
+        .await?;
+        let col0 = Arc::new(
+            Decimal128Array::from(vec![Some(22000000000), Some(11000000000)])
+                .with_data_type(DataType::Decimal128(28, 10)),
+        ) as ArrayRef;
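+        // With scale 10, these unscaled values are 22_000_000_000 * 10^-10 = 2.2
+        // and 11_000_000_000 * 10^-10 = 1.1, matching the comment above.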
+        let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![col0]).unwrap();
+        pw.write(&to_write).await?;
+        let res = pw.close().await?;
+        assert_eq!(res.len(), 1);
+        let data_file = res
+            .into_iter()
+            .next()
+            .unwrap()
+            .content(crate::spec::DataContentType::Data)
+            .partition(Struct::empty())
+            .build()
+            .unwrap();
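+        // The bounds checked below are decoded from the Parquet column statistics
+        // via the BigInt::from_signed_bytes_be path added in schema.rs above;
+        // precision 28 is written as a 12-byte fixed-length array, which the old
+        // 16-byte i128::from_be_bytes conversion rejected.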
+        assert_eq!(
+            data_file.upper_bounds().get(&0),
+            Some(Datum::decimal_with_precision(Decimal::new(22000000000_i64, 10), 28).unwrap())
+                .as_ref()
+        );
+        assert_eq!(
+            data_file.lower_bounds().get(&0),
+            Some(Datum::decimal_with_precision(Decimal::new(11000000000_i64, 10), 28).unwrap())
+                .as_ref()
+        );
+
+        // test -1.1 and -2.2
+        let schema = Arc::new(
+            Schema::builder()
+                .with_fields(vec![NestedField::optional(
+                    0,
+                    "decimal",
+                    Type::Primitive(PrimitiveType::Decimal {
+                        precision: 28,
+                        scale: 10,
+                    }),
+                )
+                .into()])
+                .build()
+                .unwrap(),
+        );
+        let arrow_schema: ArrowSchemaRef = Arc::new(schema_to_arrow_schema(&schema).unwrap());
+        let mut pw = ParquetWriterBuilder::new(
+            WriterProperties::builder().build(),
+            schema.clone(),
+            file_io.clone(),
+            location_gen.clone(),
+            file_name_gen.clone(),
+        )
+        .build()
+        .await?;
+        let col0 = Arc::new(
+            Decimal128Array::from(vec![Some(-22000000000), Some(-11000000000)])
+                .with_data_type(DataType::Decimal128(28, 10)),
+        ) as ArrayRef;
+        let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![col0]).unwrap();
+        pw.write(&to_write).await?;
+        let res = pw.close().await?;
+        assert_eq!(res.len(), 1);
+        let data_file = res
+            .into_iter()
+            .next()
+            .unwrap()
+            .content(crate::spec::DataContentType::Data)
+            .partition(Struct::empty())
+            .build()
+            .unwrap();
+        assert_eq!(
+            data_file.upper_bounds().get(&0),
+            Some(Datum::decimal_with_precision(Decimal::new(-11000000000_i64, 10), 28).unwrap())
+                .as_ref()
+        );
+        assert_eq!(
+            data_file.lower_bounds().get(&0),
+            Some(Datum::decimal_with_precision(Decimal::new(-22000000000_i64, 10), 28).unwrap())
+                .as_ref()
+        );
+
+        // test max and min of rust_decimal
+        let decimal_max = Decimal::MAX;
+        let decimal_min = Decimal::MIN;
+        assert_eq!(decimal_max.scale(), decimal_min.scale());
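+        // rust_decimal uses a 96-bit mantissa, so Decimal::MAX and Decimal::MIN
+        // are +/-(2^96 - 1) = +/-79_228_162_514_264_337_593_543_950_335 at scale
+        // 0, well within the Decimal128(38, 0) range used below.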
+        let schema = Arc::new(
+            Schema::builder()
+                .with_fields(vec![NestedField::optional(
+                    0,
+                    "decimal",
+                    Type::Primitive(PrimitiveType::Decimal {
+                        precision: 38,
+                        scale: decimal_max.scale(),
+                    }),
+                )
+                .into()])
+                .build()
+                .unwrap(),
+        );
+        let arrow_schema: ArrowSchemaRef = Arc::new(schema_to_arrow_schema(&schema).unwrap());
+        let mut pw = ParquetWriterBuilder::new(
+            WriterProperties::builder().build(),
+            schema,
+            file_io.clone(),
+            location_gen,
+            file_name_gen,
+        )
+        .build()
+        .await?;
+        let col0 = Arc::new(
+            Decimal128Array::from(vec![
+                Some(decimal_max.mantissa()),
+                Some(decimal_min.mantissa()),
+            ])
+            .with_data_type(DataType::Decimal128(38, 0)),
+        ) as ArrayRef;
+        let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![col0]).unwrap();
+        pw.write(&to_write).await?;
+        let res = pw.close().await?;
+        assert_eq!(res.len(), 1);
+        let data_file = res
+            .into_iter()
+            .next()
+            .unwrap()
+            .content(crate::spec::DataContentType::Data)
+            .partition(Struct::empty())
+            .build()
+            .unwrap();
+        assert_eq!(
+            data_file.upper_bounds().get(&0),
+            Some(Datum::decimal(decimal_max).unwrap()).as_ref()
+        );
+        assert_eq!(
+            data_file.lower_bounds().get(&0),
+            Some(Datum::decimal(decimal_min).unwrap()).as_ref()
+        );
+
+        // test max and min for precision 38
+        // # TODO
+        // Re-add this case once https://github.com/apache/iceberg-rust/issues/669 is resolved.
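+        // (The 38-digit extremes exceed rust_decimal's 96-bit range, which is why
+        // this case constructs Datum::new with PrimitiveLiteral::Int128 directly;
+        // the values still fit in i128, whose maximum is about 1.7 * 10^38.)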
+        // let schema = Arc::new(
+        //     Schema::builder()
+        //         .with_fields(vec![NestedField::optional(
+        //             0,
+        //             "decimal",
+        //             Type::Primitive(PrimitiveType::Decimal {
+        //                 precision: 38,
+        //                 scale: 0,
+        //             }),
+        //         )
+        //         .into()])
+        //         .build()
+        //         .unwrap(),
+        // );
+        // let arrow_schema: ArrowSchemaRef = Arc::new(schema_to_arrow_schema(&schema).unwrap());
+        // let mut pw = ParquetWriterBuilder::new(
+        //     WriterProperties::builder().build(),
+        //     schema,
+        //     file_io.clone(),
+        //     location_gen,
+        //     file_name_gen,
+        // )
+        // .build()
+        // .await?;
+        // let col0 = Arc::new(
+        //     Decimal128Array::from(vec![
+        //         Some(99999999999999999999999999999999999999_i128),
+        //         Some(-99999999999999999999999999999999999999_i128),
+        //     ])
+        //     .with_data_type(DataType::Decimal128(38, 0)),
+        // ) as ArrayRef;
+        // let to_write = RecordBatch::try_new(arrow_schema.clone(), vec![col0]).unwrap();
+        // pw.write(&to_write).await?;
+        // let res = pw.close().await?;
+        // assert_eq!(res.len(), 1);
+        // let data_file = res
+        //     .into_iter()
+        //     .next()
+        //     .unwrap()
+        //     .content(crate::spec::DataContentType::Data)
+        //     .partition(Struct::empty())
+        //     .build()
+        //     .unwrap();
+        // assert_eq!(
+        //     data_file.upper_bounds().get(&0),
+        //     Some(Datum::new(
+        //         PrimitiveType::Decimal {
+        //             precision: 38,
+        //             scale: 0
+        //         },
+        //         PrimitiveLiteral::Int128(99999999999999999999999999999999999999_i128)
+        //     ))
+        //     .as_ref()
+        // );
+        // assert_eq!(
+        //     data_file.lower_bounds().get(&0),
+        //     Some(Datum::new(
+        //         PrimitiveType::Decimal {
+        //             precision: 38,
+        //             scale: 0
+        //         },
+        //         PrimitiveLiteral::Int128(-99999999999999999999999999999999999999_i128)
+        //     ))
+        //     .as_ref()
+        // );
+
+        Ok(())
+    }
 }
 
\ No newline at end of file