@@ -50,7 +50,7 @@ use arrow::{
     record_batch::RecordBatch,
 };
 use fmt::{Debug, Formatter};
-use std::convert::TryInto;
+use std::convert::{TryFrom, TryInto};
 use std::{any::Any, fmt, str::FromStr, sync::Arc};
 
 /// A function's signature, which defines the function's supported argument types.
@@ -1359,6 +1359,21 @@ impl fmt::Display for ScalarFunctionExpr {
     }
 }
 
+/// Null columnar values are implemented as a scalar columnar value holding a `u32`; this type
+/// alias hides that implementation detail.
+type NullColumnarValue = ColumnarValue;
+
+impl TryFrom<&RecordBatch> for NullColumnarValue {
+    type Error = DataFusionError;
+    fn try_from(batch: &RecordBatch) -> Result<Self> {
+        let num_rows = batch
+            .num_rows()
+            .try_into()
+            .map_err(|_| DataFusionError::Internal("Batch size too large".to_string()))?;
+        Ok(ColumnarValue::Scalar(ScalarValue::UInt32(Some(num_rows))))
+    }
+}
+
 impl PhysicalExpr for ScalarFunctionExpr {
     /// Return a reference to Any that can be used for downcasting
     fn as_any(&self) -> &dyn Any {
@@ -1377,11 +1392,7 @@ impl PhysicalExpr for ScalarFunctionExpr {
         // evaluate the arguments; if there are no arguments we'll instead pass in a uint32 holding
         // the batch size (as a convention)
         let inputs = match self.args.len() {
-            0 => vec![ColumnarValue::Scalar(ScalarValue::UInt32(Some(
-                batch.num_rows().try_into().map_err(|_| {
-                    DataFusionError::Internal("Batch size too large".to_string())
-                })?,
-            )))],
+            0 => vec![NullColumnarValue::try_from(batch)?],
             _ => self
                 .args
                 .iter()
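
For context on the convention this change relies on (a zero-argument scalar function is handed a single UInt32 scalar carrying the batch size), here is a minimal sketch of how a function implementation might consume that value. This is not part of the change: the function name `pi_expr` is made up, and the `datafusion`/`arrow` import paths are assumptions that may differ between versions.

```rust
use std::sync::Arc;

use arrow::array::Float64Array;
use datafusion::error::{DataFusionError, Result};
use datafusion::physical_plan::ColumnarValue;
use datafusion::scalar::ScalarValue;

/// Hypothetical zero-argument scalar function returning PI once per row.
/// By convention it receives a single UInt32 scalar holding the batch size.
fn pi_expr(args: &[ColumnarValue]) -> Result<ColumnarValue> {
    // Recover the number of rows from the batch-size scalar.
    let num_rows = match args.first() {
        Some(ColumnarValue::Scalar(ScalarValue::UInt32(Some(n)))) => *n as usize,
        _ => {
            return Err(DataFusionError::Internal(
                "zero-argument function expected a UInt32 batch-size scalar".to_string(),
            ))
        }
    };
    // Build one output value per input row.
    let values = Float64Array::from(vec![std::f64::consts::PI; num_rows]);
    Ok(ColumnarValue::Array(Arc::new(values)))
}
```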