@@ -985,7 +985,7 @@ pub async fn fetch_parquet_metadata(
     #[allow(unused)] decryption_properties: Option<&FileDecryptionProperties>,
     cache_metadata: bool,
     file_metadata_cache: Option<Arc<dyn FileMetadataCache>>,
-) -> Result<ParquetMetaData> {
+) -> Result<Arc<ParquetMetaData>> {
     // Check cache first if caching is enabled
     if cache_metadata {
         if let Some(cache) = &file_metadata_cache {
@@ -994,7 +994,7 @@ pub async fn fetch_parquet_metadata(
                 .as_any()
                 .downcast_ref::<CachedParquetMetaData>()
             {
-                return Ok(parquet_metadata.parquet_metadata().as_ref().clone());
+                return Ok(Arc::clone(parquet_metadata.parquet_metadata()));
             }
         }
     }
@@ -1007,16 +1007,18 @@ pub async fn fetch_parquet_metadata(
     #[cfg(feature = "parquet_encryption")]
     let reader = reader.with_decryption_properties(decryption_properties);
 
-    let metadata = reader
-        .load_and_finish(fetch, file_size)
-        .await
-        .map_err(DataFusionError::from)?;
+    let metadata = Arc::new(
+        reader
+            .load_and_finish(fetch, file_size)
+            .await
+            .map_err(DataFusionError::from)?,
+    );
 
     if cache_metadata {
         if let Some(cache) = file_metadata_cache {
             cache.put(
                 meta,
-                Arc::new(CachedParquetMetaData::new(Arc::new(metadata.clone()))),
+                Arc::new(CachedParquetMetaData::new(Arc::clone(&metadata))),
             );
         }
     }
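A minimal sketch of the sharing pattern this change adopts, using generic stand-in types rather than DataFusion's actual cache or ParquetMetaData API: by returning Arc<T>, a cache hit hands out another handle to the already-parsed metadata (a refcount bump) instead of deep-cloning it for every caller.

    use std::collections::HashMap;
    use std::sync::Arc;

    // Stand-in for ParquetMetaData; the real type is comparatively expensive to clone.
    #[derive(Debug)]
    struct Metadata {
        num_rows: usize,
    }

    struct Cache {
        entries: HashMap<String, Arc<Metadata>>,
    }

    impl Cache {
        fn fetch(&mut self, key: &str) -> Arc<Metadata> {
            if let Some(cached) = self.entries.get(key) {
                // Cache hit: hand back a shared handle, no deep clone.
                return Arc::clone(cached);
            }
            // Cache miss: build once, store an Arc, return another handle to it.
            let metadata = Arc::new(Metadata { num_rows: 42 });
            self.entries.insert(key.to_string(), Arc::clone(&metadata));
            metadata
        }
    }

    fn main() {
        let mut cache = Cache { entries: HashMap::new() };
        let a = cache.fetch("part-0.parquet");
        let b = cache.fetch("part-0.parquet");
        // Both handles point at the same allocation; the metadata was never copied.
        assert!(Arc::ptr_eq(&a, &b));
        println!("rows: {}", a.num_rows);
    }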