@@ -151,9 +151,9 @@ def upload_directory(directory: PathLike,
 
     for file in files:
         if isinstance(file, str):
-            file_path = Path(directory, file)
+            file_path = Path(directory, file).expanduser()
         else:
-            file_path = file
+            file_path = file.expanduser()
 
         # Check if is present in the file_path_map
         # if it is, use the mapped value as the destination path
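As context for this hunk (not part of the diff): a minimal sketch of what `Path.expanduser()` does to the upload paths; the file names below are illustrative only.

```python
from pathlib import Path

# expanduser() rewrites a leading "~" or "~user" to the caller's home directory;
# paths without a tilde pass through unchanged.
print(Path("~/data", "shard.bin").expanduser())  # e.g. /home/alice/data/shard.bin
print(Path("/tmp/shard.bin").expanduser())       # unchanged: /tmp/shard.bin
```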
@@ -196,15 +196,15 @@ def download_directory(directory: str, files: List[str], s3_client: S3Client, bu
     """
     for file in files:
         key = f'{prefix}/{file}'.lstrip('/')
-        local_path = Path(directory, file)
+        local_path = Path(directory, file).expanduser()
         local_path.parent.mkdir(parents=True, exist_ok=True)
 
         s3_client.download_file(local_path=local_path,
                                 bucket=bucket,
                                 key=key)
 
 
-def get_checksum(file: PathLike, checksum_name: str, chunk_size=1024 * 1024) -> str:
+def get_checksum(file: Path, checksum_name: str, chunk_size=1024 * 1024) -> str:
     from awscrt import checksums
     checksum_func_map = {
         'CRC32': checksums.crc32,
@@ -217,7 +217,7 @@ def get_checksum(file: PathLike, checksum_name: str, chunk_size=1024 * 1024) ->
         raise RuntimeWarning(f"Unsupported checksum type: {checksum_name}")
 
     crc = 0
-    with open(file, "rb") as f:
+    with file.open("rb") as f:
         while True:
            chunk = f.read(chunk_size)
            if not chunk:
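For illustration only (not part of this commit): a self-contained sketch of the chunked-checksum pattern `get_checksum` relies on, with `zlib.crc32` swapped in for `awscrt.checksums.crc32` so it runs without awscrt; both accept a bytes chunk plus the CRC carried over from earlier chunks. The helper name and file contents below are assumptions, not taken from this PR.

```python
import tempfile
import zlib
from pathlib import Path


def crc32_of_file(file: Path, chunk_size: int = 1024 * 1024) -> str:
    """Chunked CRC32 of a file, mirroring the read loop in get_checksum above."""
    crc = 0
    # Works because `file` is a pathlib.Path, per the tightened annotation;
    # a plain str (also a valid PathLike) has no .open() method.
    with file.open('rb') as f:
        while True:
            chunk = f.read(chunk_size)
            if not chunk:
                break
            crc = zlib.crc32(chunk, crc)  # carry the running CRC into the next chunk
    return str(crc)


# Write a small throwaway file so the sketch runs end to end.
with tempfile.NamedTemporaryFile(delete=False, suffix='.bin') as tmp:
    tmp.write(b'streaming shard bytes' * 1024)

print(crc32_of_file(Path(tmp.name)))
```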