@@ -6,10 +6,11 @@ use convergence::protocol_ext::DataRowBatch;
 use convergence::server::{self, BindOptions};
 use convergence::sqlparser::ast::Statement;
 use convergence_arrow::table::{record_batch_to_rows, schema_to_field_desc};
-use datafusion::arrow::array::{ArrayRef, Date32Array, Float32Array, Int32Array, StringArray, TimestampSecondArray};
+use datafusion::arrow::array::{ArrayRef, Date32Array, Decimal128Array, Float32Array, Int32Array, StringArray, TimestampSecondArray};
 use datafusion::arrow::datatypes::{DataType, Field, Schema, TimeUnit};
 use datafusion::arrow::record_batch::RecordBatch;
 use std::sync::Arc;
+use rust_decimal::Decimal;
 use tokio_postgres::{connect, NoTls};
 
 struct ArrowPortal {
@@ -31,20 +32,22 @@ impl ArrowEngine {
 	fn new() -> Self {
 		let int_col = Arc::new(Int32Array::from(vec![1, 2, 3])) as ArrayRef;
 		let float_col = Arc::new(Float32Array::from(vec![1.5, 2.5, 3.5])) as ArrayRef;
+		let decimal_col = Arc::new(Decimal128Array::from(vec![11, 22, 33]).with_precision_and_scale(2, 0).unwrap()) as ArrayRef;
 		let string_col = Arc::new(StringArray::from(vec!["a", "b", "c"])) as ArrayRef;
 		let ts_col = Arc::new(TimestampSecondArray::from(vec![1577836800, 1580515200, 1583020800])) as ArrayRef;
 		let date_col = Arc::new(Date32Array::from(vec![0, 1, 2])) as ArrayRef;
 
 		let schema = Schema::new(vec![
 			Field::new("int_col", DataType::Int32, true),
 			Field::new("float_col", DataType::Float32, true),
+			Field::new("decimal_col", DataType::Decimal128(2, 0), true),
 			Field::new("string_col", DataType::Utf8, true),
 			Field::new("ts_col", DataType::Timestamp(TimeUnit::Second, None), true),
 			Field::new("date_col", DataType::Date32, true),
 		]);
 
 		Self {
-			batch: RecordBatch::try_new(Arc::new(schema), vec![int_col, float_col, string_col, ts_col, date_col])
+			batch: RecordBatch::try_new(Arc::new(schema), vec![int_col, float_col, decimal_col, string_col, ts_col, date_col])
 				.expect("failed to create batch"),
 		}
 	}
@@ -89,8 +92,8 @@ async fn basic_data_types() {
 	let rows = client.query("select 1", &[]).await.unwrap();
 	let get_row = |idx: usize| {
 		let row = &rows[idx];
-		let cols: (i32, f32, &str, NaiveDateTime, NaiveDate) =
-			(row.get(0), row.get(1), row.get(2), row.get(3), row.get(4));
+		let cols: (i32, f32, Decimal, &str, NaiveDateTime, NaiveDate) =
+			(row.get(0), row.get(1), row.get(2), row.get(3), row.get(4), row.get(5));
 		cols
 	};
 
@@ -99,6 +102,7 @@ async fn basic_data_types() {
 		(
 			1,
 			1.5,
+			Decimal::from(11),
 			"a",
 			NaiveDate::from_ymd_opt(2020, 1, 1)
 				.unwrap()
@@ -112,6 +116,7 @@ async fn basic_data_types() {
 		(
 			2,
 			2.5,
+			Decimal::from(22),
 			"b",
 			NaiveDate::from_ymd_opt(2020, 2, 1)
 				.unwrap()
@@ -125,6 +130,7 @@ async fn basic_data_types() {
 		(
 			3,
 			3.5,
+			Decimal::from(33),
 			"c",
 			NaiveDate::from_ymd_opt(2020, 3, 1)
 				.unwrap()