STYLE: flake8 upgraded to 3.3 on conda (pandas-dev#15412)
fixes for E305 (expected 2 blank lines after a class or function definition)
jreback authored Feb 15, 2017
1 parent e351ed0 commit 93f5e3a
Showing 38 changed files with 74 additions and 6 deletions.
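For context, flake8 3.3 (via the newer pycodestyle it bundles) enforces E305, which requires two blank lines between the end of a top-level function or class body and any following module-level code. A minimal sketch of the pattern applied throughout this commit (the module and names below are made up for illustration, not taken from the diff):

# Hypothetical module showing the E305 rule this commit satisfies.
def some_helper(arr, *args, **kwargs):
    """A stand-in for a small module-level helper."""
    return list(arr)


# The two blank lines above are required; with only one, flake8 3.3 reports
# "E305 expected 2 blank lines after class or function definition, found 1"
# against the next top-level statement.
__all__ = ['some_helper']

Each hunk below simply inserts that second blank line where module-level code (assignments, register_writer(...) calls, with blocks, and so on) immediately follows a def or class.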
1 change: 1 addition & 0 deletions pandas/compat/numpy/__init__.py
@@ -67,6 +67,7 @@ def np_array_datetime64_compat(arr, *args, **kwargs):

return np.array(arr, *args, **kwargs)


__all__ = ['np',
'_np_version_under1p8',
'_np_version_under1p9',
7 changes: 7 additions & 0 deletions pandas/compat/numpy/function.py
@@ -55,6 +55,7 @@ def __call__(self, args, kwargs, fname=None,
raise ValueError("invalid validation method "
"'{method}'".format(method=method))


ARGMINMAX_DEFAULTS = dict(out=None)
validate_argmin = CompatValidator(ARGMINMAX_DEFAULTS, fname='argmin',
method='both', max_fname_arg_count=1)
@@ -97,6 +98,7 @@ def validate_argmax_with_skipna(skipna, args, kwargs):
validate_argmax(args, kwargs)
return skipna


ARGSORT_DEFAULTS = OrderedDict()
ARGSORT_DEFAULTS['axis'] = -1
ARGSORT_DEFAULTS['kind'] = 'quicksort'
@@ -121,6 +123,7 @@ def validate_argsort_with_ascending(ascending, args, kwargs):
validate_argsort(args, kwargs, max_fname_arg_count=1)
return ascending


CLIP_DEFAULTS = dict(out=None)
validate_clip = CompatValidator(CLIP_DEFAULTS, fname='clip',
method='both', max_fname_arg_count=3)
@@ -141,6 +144,7 @@ def validate_clip_with_axis(axis, args, kwargs):
validate_clip(args, kwargs)
return axis


COMPRESS_DEFAULTS = OrderedDict()
COMPRESS_DEFAULTS['axis'] = None
COMPRESS_DEFAULTS['out'] = None
@@ -170,6 +174,7 @@ def validate_cum_func_with_skipna(skipna, args, kwargs, name):
validate_cum_func(args, kwargs, fname=name)
return skipna


LOGICAL_FUNC_DEFAULTS = dict(out=None)
validate_logical_func = CompatValidator(LOGICAL_FUNC_DEFAULTS, method='kwargs')

@@ -236,6 +241,7 @@ def validate_take_with_convert(convert, args, kwargs):
validate_take(args, kwargs, max_fname_arg_count=3, method='both')
return convert


TRANSPOSE_DEFAULTS = dict(axes=None)
validate_transpose = CompatValidator(TRANSPOSE_DEFAULTS, fname='transpose',
method='both', max_fname_arg_count=0)
@@ -318,6 +324,7 @@ def validate_groupby_func(name, args, kwargs, allowed=None):
"with groupby. Use .groupby(...)."
"{func}() instead".format(func=name)))


RESAMPLER_NUMPY_OPS = ('min', 'max', 'sum', 'prod',
'mean', 'std', 'var')

1 change: 1 addition & 0 deletions pandas/computation/expr.py
@@ -669,6 +669,7 @@ def visitor(x, y):
operands = node.values
return reduce(visitor, operands)


# ast.Call signature changed on 3.5,
# conditionally change which methods is named
# visit_Call depending on Python version, #11097
2 changes: 2 additions & 0 deletions pandas/core/algorithms.py
@@ -926,6 +926,7 @@ def _finalize_nsmallest(arr, kth_val, n, keep, narr):
else:
return inds


_dtype_map = {'datetime64[ns]': 'int64', 'timedelta64[ns]': 'int64'}


@@ -959,6 +960,7 @@ def _hashtable_algo(f, values, return_dtype=None):
# use Object
return f(htable.PyObjectHashTable, _ensure_object)


_hashtables = {
'float64': (htable.Float64HashTable, htable.Float64Vector),
'uint64': (htable.UInt64HashTable, htable.UInt64Vector),
1 change: 1 addition & 0 deletions pandas/core/config.py
@@ -804,6 +804,7 @@ def inner(x):

return inner


# common type validators, for convenience
# usage: register_option(... , validator = is_int)
is_int = is_type_factory(int)
2 changes: 2 additions & 0 deletions pandas/core/config_init.py
@@ -278,6 +278,7 @@ def mpl_style_cb(key):

return val


with cf.config_prefix('display'):
cf.register_option('precision', 6, pc_precision_doc, validator=is_int)
cf.register_option('float_format', None, float_format_doc,
@@ -380,6 +381,7 @@ def use_inf_as_null_cb(key):
from pandas.types.missing import _use_inf_as_null
_use_inf_as_null(key)


with cf.config_prefix('mode'):
cf.register_option('use_inf_as_null', False, use_inf_as_null_doc,
cb=use_inf_as_null_cb)
2 changes: 1 addition & 1 deletion pandas/core/frame.py
@@ -5741,9 +5741,9 @@ def _from_nested_dict(data):
def _put_str(s, space):
return ('%s' % s)[:space].ljust(space)


# ----------------------------------------------------------------------
# Add plotting methods to DataFrame

DataFrame.plot = base.AccessorProperty(gfx.FramePlotMethods,
gfx.FramePlotMethods)
DataFrame.hist = gfx.hist_frame
2 changes: 2 additions & 0 deletions pandas/core/indexing.py
@@ -36,6 +36,7 @@ def get_indexers_list():
('iat', _iAtIndexer),
]


# "null slice"
_NS = slice(None, None)

@@ -1850,6 +1851,7 @@ def _convert_key(self, key, is_setter=False):
"indexers")
return key


# 32-bit floating point machine epsilon
_eps = np.finfo('f4').eps

2 changes: 1 addition & 1 deletion pandas/formats/format.py
@@ -2479,9 +2479,9 @@ def _has_names(index):
else:
return index.name is not None


# -----------------------------------------------------------------------------
# Global formatting options

_initial_defencoding = None


3 changes: 3 additions & 0 deletions pandas/indexes/numeric.py
@@ -159,6 +159,7 @@ def _assert_safe_casting(cls, data, subarr):
raise TypeError('Unsafe NumPy casting, you must '
'explicitly cast')


Int64Index._add_numeric_methods()
Int64Index._add_logical_methods()

@@ -238,6 +239,7 @@ def _assert_safe_casting(cls, data, subarr):
raise TypeError('Unsafe NumPy casting, you must '
'explicitly cast')


UInt64Index._add_numeric_methods()
UInt64Index._add_logical_methods()

@@ -391,5 +393,6 @@ def isin(self, values, level=None):
return lib.ismember_nans(np.array(self), value_set,
isnull(list(value_set)).any())


Float64Index._add_numeric_methods()
Float64Index._add_logical_methods_disabled()
1 change: 1 addition & 0 deletions pandas/indexes/range.py
@@ -652,5 +652,6 @@ def _evaluate_numeric_binop(self, other):
reversed=True,
step=operator.div)


RangeIndex._add_numeric_methods()
RangeIndex._add_logical_methods()
2 changes: 2 additions & 0 deletions pandas/io/common.py
@@ -75,6 +75,7 @@ class ParserError(ValueError):
"""
pass


# gh-12665: Alias for now and remove later.
CParserError = ParserError

@@ -116,6 +117,7 @@ def __iter__(self):
def __next__(self):
raise AbstractMethodError(self)


if not compat.PY3:
BaseIterator.next = lambda self: self.__next__()

5 changes: 5 additions & 0 deletions pandas/io/excel.py
@@ -886,12 +886,14 @@ def _convert_to_style(cls, style_dict):

return xls_style


register_writer(_Openpyxl1Writer)


class _OpenpyxlWriter(_Openpyxl1Writer):
engine = 'openpyxl'


register_writer(_OpenpyxlWriter)


@@ -1368,6 +1370,7 @@ def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
for k, v in style_kwargs.items():
setattr(xcell, k, v)


register_writer(_Openpyxl22Writer)


@@ -1491,6 +1494,7 @@ def _convert_to_style(cls, style_dict, num_format_str=None):

return style


register_writer(_XlwtWriter)


@@ -1603,4 +1607,5 @@ def _convert_to_style(self, style_dict, num_format_str=None):

return xl_format


register_writer(_XlsxWriter)
1 change: 1 addition & 0 deletions pandas/io/gbq.py
@@ -58,6 +58,7 @@ def _test_google_api_imports():
raise ImportError("Missing module required for Google BigQuery "
"support: {0}".format(str(e)))


logger = logging.getLogger('pandas.io.gbq')
logger.setLevel(logging.ERROR)

2 changes: 2 additions & 0 deletions pandas/io/packers.py
@@ -217,6 +217,7 @@ def read(fh):

raise ValueError('path_or_buf needs to be a string file path or file-like')


dtype_dict = {21: np.dtype('M8[ns]'),
u('datetime64[ns]'): np.dtype('M8[ns]'),
u('datetime64[us]'): np.dtype('M8[us]'),
@@ -237,6 +238,7 @@ def dtype_for(t):
return dtype_dict[t]
return np.typeDict.get(t, t)


c2f_dict = {'complex': np.float64,
'complex128': np.float64,
'complex64': np.float32}
2 changes: 2 additions & 0 deletions pandas/io/parsers.py
@@ -409,6 +409,7 @@ def _read(filepath_or_buffer, kwds):

return data


_parser_defaults = {
'delimiter': None,

@@ -655,6 +656,7 @@ def parser_f(filepath_or_buffer,

return parser_f


read_csv = _make_parser_function('read_csv', sep=',')
read_csv = Appender(_read_csv_doc)(read_csv)

4 changes: 4 additions & 0 deletions pandas/io/pytables.py
@@ -74,6 +74,7 @@ def _ensure_encoding(encoding):
encoding = _default_encoding
return encoding


Term = Expr


@@ -112,6 +113,7 @@ class ClosedFileError(Exception):
class IncompatibilityWarning(Warning):
pass


incompatibility_doc = """
where criteria is being ignored as this version [%s] is too old (or
not-defined), read the file in and write it out to a new file to upgrade (with
@@ -122,6 +124,7 @@ class IncompatibilityWarning(Warning):
class AttributeConflictWarning(Warning):
pass


attribute_conflict_doc = """
the [%s] attribute of the existing index is [%s] which conflicts with the new
[%s], resetting the attribute to None
@@ -131,6 +134,7 @@ class AttributeConflictWarning(Warning):
class DuplicateWarning(Warning):
pass


duplicate_doc = """
duplicate entries in table, taking most recently appended
"""
1 change: 1 addition & 0 deletions pandas/io/sql.py
@@ -495,6 +495,7 @@ def has_table(table_name, con, flavor=None, schema=None):
pandas_sql = pandasSQL_builder(con, flavor=flavor, schema=schema)
return pandas_sql.has_table(table_name)


table_exists = has_table


1 change: 1 addition & 0 deletions pandas/io/stata.py
@@ -459,6 +459,7 @@ class PossiblePrecisionLoss(Warning):
class ValueLabelTypeMismatch(Warning):
pass


value_label_mismatch_doc = """
Stata value labels (pandas categories) must be strings. Column {0} contains
non-string labels which will be converted to strings. Please check that the
1 change: 1 addition & 0 deletions pandas/msgpack/__init__.py
@@ -41,6 +41,7 @@ def packb(o, **kwargs):
"""
return Packer(**kwargs).pack(o)


# alias for compatibility to simplejson/marshal/pickle.
load = unpack
loads = unpackb
1 change: 1 addition & 0 deletions pandas/sparse/frame.py
@@ -863,6 +863,7 @@ def homogenize(series_dict):

return output


# use unaccelerated ops for sparse objects
ops.add_flex_arithmetic_methods(SparseDataFrame, use_numexpr=False,
**ops.frame_flex_funcs)
1 change: 1 addition & 0 deletions pandas/sparse/series.py
@@ -832,6 +832,7 @@ def from_coo(cls, A, dense_index=False):
"""
return _coo_to_sparse_series(A, dense_index=dense_index)


# overwrite series methods with unaccelerated versions
ops.add_special_arithmetic_methods(SparseSeries, use_numexpr=False,
**ops.series_special_funcs)
3 changes: 3 additions & 0 deletions pandas/stats/moments.py
@@ -385,6 +385,7 @@ def ewmstd(arg, com=None, span=None, halflife=None, alpha=None, min_periods=0,
bias=bias,
func_kw=['bias'])


ewmvol = ewmstd


@@ -476,6 +477,7 @@ def f(arg, window, min_periods=None, freq=None, center=False,
**kwargs)
return f


rolling_max = _rolling_func('max', 'Moving maximum.', how='max')
rolling_min = _rolling_func('min', 'Moving minimum.', how='min')
rolling_sum = _rolling_func('sum', 'Moving sum.')
@@ -683,6 +685,7 @@ def f(arg, min_periods=1, freq=None, **kwargs):
**kwargs)
return f


expanding_max = _expanding_func('max', 'Expanding maximum.')
expanding_min = _expanding_func('min', 'Expanding minimum.')
expanding_sum = _expanding_func('sum', 'Expanding sum.')
2 changes: 1 addition & 1 deletion pandas/tests/sparse/test_libsparse.py
@@ -560,8 +560,8 @@ def _check_case(xloc, xlen, yloc, ylen, eloc, elen):

check_cases(_check_case)

# too cute? oh but how I abhor code duplication

# too cute? oh but how I abhor code duplication
check_ops = ['add', 'sub', 'mul', 'truediv', 'floordiv']


1 change: 1 addition & 0 deletions pandas/tests/test_generic.py
@@ -1588,6 +1588,7 @@ def test_to_xarray(self):
# non-convertible
self.assertRaises(ValueError, lambda: result.to_pandas())


# run all the tests, but wrap each in a warning catcher
for t in ['test_rename', 'test_rename_axis', 'test_get_numeric_data',
'test_get_default', 'test_nonzero',