 ]
 
 no_limit = [
-    param(
-        None, id="nolimit", marks=[pytest.mark.notimpl(["dask", "impala", "pyspark"])]
-    )
+    param(None, id="nolimit", marks=[pytest.mark.notimpl(["impala", "pyspark"])])
 ]
 
 limit_no_limit = limit + no_limit
@@ -117,7 +115,7 @@ def test_scalar_to_pyarrow_scalar(limit, awards_players):
 }
 
 
-@pytest.mark.notimpl(["dask", "impala", "pyspark", "druid"])
+@pytest.mark.notimpl(["impala", "pyspark", "druid"])
 def test_table_to_pyarrow_table_schema(con, awards_players):
     table = awards_players.to_pyarrow()
     assert isinstance(table, pa.Table)
@@ -136,7 +134,7 @@ def test_table_to_pyarrow_table_schema(con, awards_players):
     assert table.schema == expected_schema
 
 
-@pytest.mark.notimpl(["dask", "impala", "pyspark"])
+@pytest.mark.notimpl(["impala", "pyspark"])
 def test_column_to_pyarrow_table_schema(awards_players):
     expr = awards_players.awardID
     array = expr.to_pyarrow()
@@ -193,15 +191,15 @@ def test_to_pyarrow_batches_borked_types(batting):
     util.consume(batch_reader)
 
 
-@pytest.mark.notimpl(["dask", "impala", "pyspark"])
+@pytest.mark.notimpl(["impala", "pyspark"])
 def test_to_pyarrow_memtable(con):
     expr = ibis.memtable({"x": [1, 2, 3]})
     table = con.to_pyarrow(expr)
     assert isinstance(table, pa.Table)
     assert len(table) == 3
 
 
-@pytest.mark.notimpl(["dask", "impala", "pyspark"])
+@pytest.mark.notimpl(["impala", "pyspark"])
 def test_to_pyarrow_batches_memtable(con):
     expr = ibis.memtable({"x": [1, 2, 3]})
     n = 0
@@ -212,7 +210,7 @@ def test_to_pyarrow_batches_memtable(con):
     assert n == 3
 
 
-@pytest.mark.notimpl(["dask", "impala", "pyspark"])
+@pytest.mark.notimpl(["impala", "pyspark"])
 def test_table_to_parquet(tmp_path, backend, awards_players):
     outparquet = tmp_path / "out.parquet"
     awards_players.to_parquet(outparquet)
@@ -265,9 +263,7 @@ def test_roundtrip_partitioned_parquet(tmp_path, con, backend, awards_players):
     backend.assert_frame_equal(reingest.to_pandas(), awards_players.to_pandas())
 
 
-@pytest.mark.notimpl(
-    ["dask", "impala", "pyspark"], reason="No support for exporting files"
-)
+@pytest.mark.notimpl(["impala", "pyspark"], reason="No support for exporting files")
 @pytest.mark.parametrize("ftype", ["csv", "parquet"])
 def test_memtable_to_file(tmp_path, con, ftype, monkeypatch):
     """
@@ -288,7 +284,7 @@ def test_memtable_to_file(tmp_path, con, ftype, monkeypatch):
     assert outfile.is_file()
 
 
-@pytest.mark.notimpl(["dask", "impala", "pyspark"])
+@pytest.mark.notimpl(["impala", "pyspark"])
 def test_table_to_csv(tmp_path, backend, awards_players):
     outcsv = tmp_path / "out.csv"
 
@@ -314,7 +310,6 @@ def test_table_to_csv(tmp_path, backend, awards_players):
                     ["impala"], raises=AttributeError, reason="fetchmany doesn't exist"
                 ),
                 pytest.mark.notyet(["druid"], raises=sa.exc.ProgrammingError),
-                pytest.mark.notyet(["dask"], raises=NotImplementedError),
                 pytest.mark.notyet(["pyspark"], raises=NotImplementedError),
             ],
         ),
@@ -329,7 +324,6 @@ def test_table_to_csv(tmp_path, backend, awards_players):
                     ["druid", "snowflake", "trino"], raises=sa.exc.ProgrammingError
                 ),
                 pytest.mark.notyet(["oracle"], raises=sa.exc.DatabaseError),
-                pytest.mark.notyet(["dask"], raises=NotImplementedError),
                 pytest.mark.notyet(["mssql", "mysql"], raises=sa.exc.OperationalError),
                 pytest.mark.notyet(["pyspark"], raises=ParseException),
             ],
@@ -390,7 +384,6 @@ def test_roundtrip_delta(con, alltypes, tmp_path, monkeypatch):
 @pytest.mark.xfail_version(
     duckdb=["duckdb<0.8.1"], raises=AssertionError, reason="bug in duckdb"
 )
-@pytest.mark.notimpl(["dask"], raises=NotImplementedError)
 @pytest.mark.notimpl(
     ["druid"], raises=AttributeError, reason="string type is used for timestamp_col"
 )
@@ -419,7 +412,7 @@ def test_arrow_timestamp_with_time_zone(alltypes):
     assert batch.schema.types == expected
 
 
-@pytest.mark.notimpl(["dask", "druid"])
+@pytest.mark.notimpl(["druid"])
 @pytest.mark.notimpl(
     ["impala"], raises=AttributeError, reason="missing `fetchmany` on the cursor"
 )
@@ -447,7 +440,7 @@ def test_empty_memtable(backend, con):
     backend.assert_frame_equal(result, expected)
 
 
-@pytest.mark.notimpl(["dask", "flink", "impala", "pyspark"])
+@pytest.mark.notimpl(["flink", "impala", "pyspark"])
 def test_to_pandas_batches_empty_table(backend, con):
     t = backend.functional_alltypes.limit(0)
     n = t.count().execute()
@@ -456,7 +449,7 @@ def test_to_pandas_batches_empty_table(backend, con):
     assert sum(map(len, t.to_pandas_batches())) == n
 
 
-@pytest.mark.notimpl(["dask", "druid", "flink", "impala", "pyspark"])
+@pytest.mark.notimpl(["druid", "flink", "impala", "pyspark"])
 @pytest.mark.parametrize("n", [None, 1])
 def test_to_pandas_batches_nonempty_table(backend, con, n):
     t = backend.functional_alltypes.limit(n)
@@ -466,7 +459,7 @@ def test_to_pandas_batches_nonempty_table(backend, con, n):
     assert sum(map(len, t.to_pandas_batches())) == n
 
 
-@pytest.mark.notimpl(["dask", "flink", "impala", "pyspark"])
+@pytest.mark.notimpl(["flink", "impala", "pyspark"])
 @pytest.mark.parametrize("n", [None, 0, 1, 2])
 def test_to_pandas_batches_column(backend, con, n):
     t = backend.functional_alltypes.limit(n).timestamp_col
@@ -476,7 +469,7 @@ def test_to_pandas_batches_column(backend, con, n):
     assert sum(map(len, t.to_pandas_batches())) == n
 
 
-@pytest.mark.notimpl(["dask", "druid", "flink", "impala", "pyspark"])
+@pytest.mark.notimpl(["druid", "flink", "impala", "pyspark"])
 def test_to_pandas_batches_scalar(backend, con):
     t = backend.functional_alltypes.timestamp_col.max()
     expected = t.execute()
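Every hunk above drops "dask" from a `notimpl` or `notyet` backend list, meaning these export tests are now expected to pass on the dask backend. Note that `notimpl` and `notyet` are custom markers defined in ibis's test suite, not built-in pytest marks. As a rough sketch of how such a marker can be turned into a per-backend expected failure at collection time: the sketch below is an illustrative assumption, not ibis's actual conftest, and `CURRENT_BACKEND` is a hypothetical stand-in for however the backend under test is selected.

# conftest.py sketch: turn notimpl marks into xfails for one backend.
import pytest

# Assumption: the backend under test is known at collection time; ibis
# derives this from fixtures, here it is a constant for illustration.
CURRENT_BACKEND = "pyspark"


def pytest_configure(config):
    # Register the marker so pytest does not warn about an unknown mark.
    config.addinivalue_line(
        "markers",
        "notimpl(backends, *, raises=None, reason=None): "
        "expected failure on the listed backends",
    )


def pytest_collection_modifyitems(config, items):
    for item in items:
        for marker in item.iter_markers(name="notimpl"):
            if CURRENT_BACKEND in marker.args[0]:
                # Convert the declarative mark into an xfail for this backend,
                # forwarding the optional raises/reason metadata.
                item.add_marker(
                    pytest.mark.xfail(
                        raises=marker.kwargs.get("raises"),
                        reason=marker.kwargs.get("reason", "not implemented"),
                        strict=False,
                    )
                )

Under this scheme, a test marked `@pytest.mark.notimpl(["impala", "pyspark"])` runs normally on every backend except those two, where a failure is tolerated; the same mechanism works inside `param(..., marks=[...])`, as in the `no_limit` hunk at the top of the diff.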