@@ -3,6 +3,8 @@
 import os
 import tempfile
 
+import pytest
+
 from dvuploader.dvuploader import DVUploader
 from dvuploader.file import File
 
@@ -320,3 +322,124 @@ def test_native_upload_with_large_tabular_files_parallel(
             dataverse_url=BASE_URL,
             n_parallel_uploads=10,
         )
+
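+    # Dataverse unpacks uploaded zip archives on ingest, so uploading a single
+    # archive should register each member as its own dataset file.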
+    def test_zip_file_upload(
+        self,
+        credentials,
+    ):
+        BASE_URL, API_TOKEN = credentials
+
+        # Create Dataset
+        pid = create_dataset(
+            parent="Root",
+            server_url=BASE_URL,
+            api_token=API_TOKEN,
+        )
+
+        # Arrange
+        files = [
+            File(filepath="tests/fixtures/archive.zip"),
+        ]
+
+        # Act
+        uploader = DVUploader(files=files)
+        uploader.upload(
+            persistent_id=pid,
+            api_token=API_TOKEN,
+            dataverse_url=BASE_URL,
+            n_parallel_uploads=10,
+        )
+
+        # Assert
+        files = retrieve_dataset_files(
+            dataverse_url=BASE_URL,
+            persistent_id=pid,
+            api_token=API_TOKEN,
+        )
+
+        assert len(files) == 5, f"Expected 5 files, got {len(files)}"
+
+        expected_files = [
+            "hallo.tab",
+            "hallo2.tab",
+            "hallo3.tab",
+            "hallo4.tab",
+            "hallo5.tab",
+        ]
+
+        assert sorted([file["label"] for file in files]) == sorted(expected_files)
+
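+    # NOTE: re-uses tests/fixtures/archive.zip; judging by its name, this test
+    # presumably targets the nested (zip-within-zip) path, with the same five
+    # member files expected after unpacking.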
+    def test_zipzip_file_upload(
+        self,
+        credentials,
+    ):
+        BASE_URL, API_TOKEN = credentials
+
+        # Create Dataset
+        pid = create_dataset(
+            parent="Root",
+            server_url=BASE_URL,
+            api_token=API_TOKEN,
+        )
+
+        # Arrange
+        files = [
+            File(filepath="tests/fixtures/archive.zip"),
+        ]
+
+        # Act
+        uploader = DVUploader(files=files)
+        uploader.upload(
+            persistent_id=pid,
+            api_token=API_TOKEN,
+            dataverse_url=BASE_URL,
+            n_parallel_uploads=10,
+        )
+
+        # Assert
+        files = retrieve_dataset_files(
+            dataverse_url=BASE_URL,
+            persistent_id=pid,
+            api_token=API_TOKEN,
+        )
+
+        assert len(files) == 5, f"Expected 5 files, got {len(files)}"
+
+        expected_files = [
+            "hallo.tab",
+            "hallo2.tab",
+            "hallo3.tab",
+            "hallo4.tab",
+            "hallo5.tab",
+        ]
+
+        assert sorted([file["label"] for file in files]) == sorted(expected_files)
+
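+    # many_files.zip presumably contains more entries than the allowed
+    # per-archive limit, so the upload should be rejected up front.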
+    def test_too_many_zip_files(
+        self,
+        credentials,
+    ):
+        BASE_URL, API_TOKEN = credentials
+
+        # Create Dataset
+        pid = create_dataset(
+            parent="Root",
+            server_url=BASE_URL,
+            api_token=API_TOKEN,
+        )
+
+        # Arrange
+        files = [
+            File(filepath="tests/fixtures/many_files.zip"),
+        ]
+
+        # Act
+        uploader = DVUploader(files=files)
+
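+        # Expect the uploader to reject the oversized archive with a ValueError.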
+        with pytest.raises(ValueError):
+            uploader.upload(
+                persistent_id=pid,
+                api_token=API_TOKEN,
+                dataverse_url=BASE_URL,
+                n_parallel_uploads=10,
+            )