@@ -34,8 +34,12 @@ use datafusion::{
 };
 use datafusion_common::config::ConfigOptions;
 use datafusion_execution::object_store::ObjectStoreUrl;
+use datafusion_expr::ScalarUDF;
+use datafusion_functions::math::random::RandomFunc;
 use datafusion_functions_aggregate::count::count_udaf;
-use datafusion_physical_expr::{aggregate::AggregateExprBuilder, Partitioning};
+use datafusion_physical_expr::{
+    aggregate::AggregateExprBuilder, Partitioning, ScalarFunctionExpr,
+};
 use datafusion_physical_expr::{expressions::col, LexOrdering, PhysicalSortExpr};
 use datafusion_physical_optimizer::{
     filter_pushdown::FilterPushdown, PhysicalOptimizerRule,
@@ -76,6 +80,40 @@ fn test_pushdown_into_scan() {
     );
 }
 
+#[test]
+fn test_pushdown_volatile_functions_not_allowed() {
+    // Test that we do not push down filters with volatile functions
+    // Use random() as an example of a volatile function
+    let scan = TestScanBuilder::new(schema()).with_support(true).build();
+    let predicate = Arc::new(BinaryExpr::new(
+        Arc::new(Column::new_with_schema("a", &schema()).unwrap()),
+        Operator::Eq,
+        Arc::new(
+            ScalarFunctionExpr::try_new(
+                Arc::new(ScalarUDF::from(RandomFunc::new())),
+                vec![],
+                &schema(),
+            )
+            .unwrap(),
+        ),
+    )) as Arc<dyn PhysicalExpr>;
+    let plan = Arc::new(FilterExec::try_new(predicate, scan).unwrap());
+    // expect the filter to not be pushed down
+    insta::assert_snapshot!(
+        OptimizationTest::new(plan, FilterPushdown::new(), true),
+        @r"
+    OptimizationTest:
+      input:
+        - FilterExec: a@0 = random()
+        -   DataSourceExec: file_groups={1 group: [[test.parquet]]}, projection=[a, b, c], file_type=test, pushdown_supported=true
+      output:
+        Ok:
+          - FilterExec: a@0 = random()
+          -   DataSourceExec: file_groups={1 group: [[test.parquet]]}, projection=[a, b, c], file_type=test, pushdown_supported=true
+    ",
+    );
+}
+
 /// Show that we can use config options to determine how to do pushdown.
 #[test]
 fn test_pushdown_into_scan_with_config_options() {
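For context on why random() is used as the volatile example in the new test: a ScalarUDF's Signature carries a Volatility marker, and that marker is what allows an optimizer rule to recognize predicates whose result can differ between evaluations and keep them out of the scan. A minimal sketch of that property, not part of the patch above and assuming datafusion_expr::Volatility and ScalarUDF::signature(), which this diff does not touch:

use datafusion_expr::{ScalarUDF, Volatility};
use datafusion_functions::math::random::RandomFunc;

fn main() {
    let udf = ScalarUDF::from(RandomFunc::new());
    // random() is registered as Volatile: it may return a different value on every
    // invocation, so evaluating it earlier (inside the scan) could change results.
    assert_eq!(udf.signature().volatility, Volatility::Volatile);
}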