Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

DEPR: deprecate get_values #26409

Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
experiment
  • Loading branch information
jorisvandenbossche committed May 15, 2019
commit 61c32754e54c3360fbdcda04bb52a893d8dc35ab
3 changes: 3 additions & 0 deletions pandas/_libs/lib.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,9 @@ def values_from_object(obj: object):
""" return my values or the object if we are say an ndarray """
func: object

if getattr(obj, '_typ', '') == 'dataframe':
return obj.values

func = getattr(obj, '_internal_get_values', None)
if func is not None:
obj = func()
Expand Down
2 changes: 2 additions & 0 deletions pandas/core/arrays/categorical.py
Original file line number Diff line number Diff line change
Expand Up @@ -1507,6 +1507,8 @@ def get_values(self):
fill_value=np.nan)
return np.array(self)

_internal_get_values = get_values

def check_for_ordered(self, op):
""" assert that we are ordered """
if not self.ordered:
Expand Down
9 changes: 7 additions & 2 deletions pandas/core/arrays/sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ def _sparse_array_op(

if left.sp_index.ngaps == 0 or right.sp_index.ngaps == 0:
with np.errstate(all='ignore'):
result = op(left.get_values(), right.get_values())
result = op(left.to_dense(), right.to_dense())
fill = op(_get_fill(left), _get_fill(right))

if left.sp_index.ngaps == 0:
Expand Down Expand Up @@ -1457,7 +1457,12 @@ def to_dense(self):
return np.asarray(self, dtype=self.sp_values.dtype)

# TODO: Look into deprecating this in favor of `to_dense`.
get_values = to_dense
# get_values = to_dense

def get_values(self):
    # NOTE(review): experimental tripwire from this WIP commit, not meant to
    # ship — it replaces the former `get_values = to_dense` alias (left
    # commented out directly above) with a method that fails loudly, so the
    # test suite surfaces every remaining internal caller of get_values().
    # Internal code is being migrated to `_internal_get_values` instead.
    raise Exception("USING GET_VALUES")

_internal_get_values = to_dense

# ------------------------------------------------------------------------
# IO
Expand Down
6 changes: 3 additions & 3 deletions pandas/core/groupby/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -1006,7 +1006,7 @@ def nunique(self, dropna=True):
"""
ids, _, _ = self.grouper.group_info

val = self.obj.get_values()
val = self.obj._internal_get_values()

try:
sorter = np.lexsort((val, ids))
Expand Down Expand Up @@ -1080,7 +1080,7 @@ def value_counts(self, normalize=False, sort=True, ascending=False,
bins=bins)

ids, _, _ = self.grouper.group_info
val = self.obj.get_values()
val = self.obj._internal_get_values()

# groupby removes null keys from groupings
mask = ids != -1
Expand Down Expand Up @@ -1194,7 +1194,7 @@ def count(self):
Count of values within each group.
"""
ids, _, ngroups = self.grouper.group_info
val = self.obj.get_values()
val = self.obj._internal_get_values()

mask = (ids != -1) & ~isna(val)
ids = ensure_platform_int(ids)
Expand Down
2 changes: 2 additions & 0 deletions pandas/core/indexes/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -3717,6 +3717,8 @@ def get_values(self):
"""
return self.values

_internal_get_values = get_values

@Appender(IndexOpsMixin.memory_usage.__doc__)
def memory_usage(self, deep=False):
result = super().memory_usage(deep=deep)
Expand Down
2 changes: 2 additions & 0 deletions pandas/core/indexes/category.py
Original file line number Diff line number Diff line change
Expand Up @@ -357,6 +357,8 @@ def get_values(self):
""" return the underlying data as an ndarray """
return self._data.get_values()

_internal_get_values = get_values

def tolist(self):
return self._data.tolist()

Expand Down
2 changes: 1 addition & 1 deletion pandas/core/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -1894,7 +1894,7 @@ def wrapper(self, other, axis=None):
name=res_name, dtype='bool')

else:
values = self.get_values()
values = self.to_numpy()

with np.errstate(all='ignore'):
res = na_op(values, other)
Expand Down
3 changes: 2 additions & 1 deletion pandas/core/series.py
Original file line number Diff line number Diff line change
Expand Up @@ -505,7 +505,8 @@ def get_values(self):
numpy.ndarray
Data of the Series.
"""
warnings.warn("deprecated", FutureWarning, stacklevel=2)
raise Exception("USING GET_VALUES")
#warnings.warn("deprecated", FutureWarning, stacklevel=2)
return self._data.get_values()

@property
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/sparse/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -683,7 +683,7 @@ def _reindex_with_indexers(self, reindexers, method=None, fill_value=None,
if col not in self:
continue
if row_indexer is not None:
new_arrays[col] = algos.take_1d(self[col].get_values(),
new_arrays[col] = algos.take_1d(self[col]._internal_get_values(),
row_indexer,
fill_value=fill_value)
else:
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/sparse/series.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def __unicode__(self):
def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None,
filter_type=None, **kwds):
""" perform a reduction operation """
return op(self.get_values(), skipna=skipna, **kwds)
return op(self.array.to_dense(), skipna=skipna, **kwds)

def __getstate__(self):
# pickling
Expand Down
6 changes: 3 additions & 3 deletions pandas/util/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -1043,7 +1043,7 @@ def assert_series_equal(left, right, check_dtype=True,
assert_attr_equal('dtype', left, right)

if check_exact:
assert_numpy_array_equal(left.get_values(), right.get_values(),
assert_numpy_array_equal(left._internal_get_values(), right._internal_get_values(),
check_dtype=check_dtype,
obj='{obj}'.format(obj=obj),)
elif check_datetimelike_compat:
Expand All @@ -1062,7 +1062,7 @@ def assert_series_equal(left, right, check_dtype=True,
'{right}.').format(left=left.values, right=right.values)
raise AssertionError(msg)
else:
assert_numpy_array_equal(left.get_values(), right.get_values(),
assert_numpy_array_equal(left._internal_get_values(), right._internal_get_values(),
check_dtype=check_dtype)
elif is_interval_dtype(left) or is_interval_dtype(right):
assert_interval_array_equal(left.array, right.array)
Expand All @@ -1077,7 +1077,7 @@ def assert_series_equal(left, right, check_dtype=True,
is_extension_array_dtype(right) and not is_categorical_dtype(right)):
assert_extension_array_equal(left.array, right.array)
else:
_testing.assert_almost_equal(left.get_values(), right.get_values(),
_testing.assert_almost_equal(left._internal_get_values(), right._internal_get_values(),
check_less_precise=check_less_precise,
check_dtype=check_dtype,
obj='{obj}'.format(obj=obj))
Expand Down