From 4950201da04a30ada526718a929c3258f1393bcd Mon Sep 17 00:00:00 2001 From: Aaron Critchley Date: Tue, 31 Jul 2018 10:53:40 +0100 Subject: [PATCH 1/4] Update base.py --- pandas/core/indexes/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index 2a191ef76473b..2c8803d4f8741 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -147,6 +147,7 @@ def index_arithmetic_method(self, other): name = '__{name}__'.format(name=op.__name__) # TODO: docstring? + foo = 'bar' # Never commit, just testing CI return set_function_name(index_arithmetic_method, name, cls) From 1a9d3f9c941435671599b9510bbd5ac3c0d956cc Mon Sep 17 00:00:00 2001 From: Aaron Critchley Date: Mon, 27 Aug 2018 04:16:06 +0100 Subject: [PATCH 2/4] Progress on #18419 --- doc/source/conf.py | 6 +++--- pandas/compat/pickle_compat.py | 24 ++++++++++-------------- pandas/core/frame.py | 4 ++-- pandas/core/indexing.py | 6 +++--- pandas/core/nanops.py | 2 +- pandas/util/_print_versions.py | 6 +++--- pandas/util/_validators.py | 2 +- 7 files changed, 23 insertions(+), 27 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 29f947e1144ea..e10b788ba7b22 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -565,19 +565,19 @@ def linkcode_resolve(domain, info): for part in fullname.split('.'): try: obj = getattr(obj, part) - except: + except AttributeError: return None try: fn = inspect.getsourcefile(obj) - except: + except TypeError: fn = None if not fn: return None try: source, lineno = inspect.getsourcelines(obj) - except: + except OSError: lineno = None if lineno: diff --git a/pandas/compat/pickle_compat.py b/pandas/compat/pickle_compat.py index c1a9a9fc1ed13..7e6cbedd58cf2 100644 --- a/pandas/compat/pickle_compat.py +++ b/pandas/compat/pickle_compat.py @@ -33,7 +33,7 @@ def load_reduce(self): cls = args[0] stack[-1] = object.__new__(cls) return - except: + except Exception: pass # try to re-encode the arguments @@ -44,7 +44,7 @@ def load_reduce(self): try: stack[-1] = func(*args) return - except: + except Exception: pass # unknown exception, re-raise @@ -182,7 +182,7 @@ def load_newobj_ex(self): try: Unpickler.dispatch[pkl.NEWOBJ_EX[0]] = load_newobj_ex -except: +except Exception: pass @@ -200,15 +200,11 @@ def load(fh, encoding=None, compat=False, is_verbose=False): compat: provide Series compatibility mode, boolean, default False is_verbose: show exception output """ + fh.seek(0) + if encoding is not None: + up = Unpickler(fh, encoding=encoding) + else: + up = Unpickler(fh) + up.is_verbose = is_verbose - try: - fh.seek(0) - if encoding is not None: - up = Unpickler(fh, encoding=encoding) - else: - up = Unpickler(fh) - up.is_verbose = is_verbose - - return up.load() - except: - raise + return up.load() diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 4faf4e88e5a3c..46be767d7ff8a 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -3187,7 +3187,7 @@ def _ensure_valid_index(self, value): if not len(self.index) and is_list_like(value): try: value = Series(value) - except: + except ValueError: raise ValueError('Cannot set a frame with no defined index ' 'and a value that cannot be converted to a ' 'Series') @@ -7621,7 +7621,7 @@ def convert(v): values = np.array([convert(v) for v in values]) else: values = convert(values) - except: + except Exception: values = convert(values) else: diff --git a/pandas/core/indexing.py b/pandas/core/indexing.py index a245ecfa007f3..b83c08d3bd9d2 100755 --- 
a/pandas/core/indexing.py +++ b/pandas/core/indexing.py @@ -2146,7 +2146,7 @@ def _getitem_tuple(self, tup): self._has_valid_tuple(tup) try: return self._getitem_lowerdim(tup) - except: + except IndexingError: pass retval = self.obj @@ -2705,13 +2705,13 @@ def maybe_droplevels(index, key): for _ in key: try: index = index.droplevel(0) - except: + except ValueError: # we have dropped too much, so back out return original_index else: try: index = index.droplevel(0) - except: + except ValueError: pass return index diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index f44fb4f6e9e14..96701915abe42 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -503,7 +503,7 @@ def reduction(values, axis=None, skipna=True): try: result = getattr(values, meth)(axis, dtype=dtype_max) result.fill(np.nan) - except: + except AttributeError: result = np.nan else: result = getattr(values, meth)(axis) diff --git a/pandas/util/_print_versions.py b/pandas/util/_print_versions.py index 5600834f3b615..252491511105f 100644 --- a/pandas/util/_print_versions.py +++ b/pandas/util/_print_versions.py @@ -21,7 +21,7 @@ def get_sys_info(): stdout=subprocess.PIPE, stderr=subprocess.PIPE) so, serr = pipe.communicate() - except: + except ValueError: pass else: if pipe.returncode == 0: @@ -50,7 +50,7 @@ def get_sys_info(): ("LANG", "{lang}".format(lang=os.environ.get('LANG', "None"))), ("LOCALE", '.'.join(map(str, locale.getlocale()))), ]) - except: + except Exception: pass return blob @@ -108,7 +108,7 @@ def show_versions(as_json=False): mod = importlib.import_module(modname) ver = ver_f(mod) deps_blob.append((modname, ver)) - except: + except Exception: deps_blob.append((modname, None)) if (as_json): diff --git a/pandas/util/_validators.py b/pandas/util/_validators.py index a96563051e7de..51c8c95b63b10 100644 --- a/pandas/util/_validators.py +++ b/pandas/util/_validators.py @@ -59,7 +59,7 @@ def _check_for_default_values(fname, arg_val_dict, compat_args): # could not compare them directly, so try comparison # using the 'is' operator - except: + except Exception: match = (arg_val_dict[key] is compat_args[key]) if not match: From 1585821eae91338594f11c0189e516e685e90eba Mon Sep 17 00:00:00 2001 From: Aaron Critchley Date: Mon, 27 Aug 2018 07:58:12 +0100 Subject: [PATCH 3/4] Completing 18419 --- doc/source/whatsnew/v0.24.0.txt | 2 +- pandas/core/computation/pytables.py | 4 +- pandas/core/dtypes/common.py | 2 +- pandas/core/dtypes/dtypes.py | 8 ++-- pandas/core/indexes/frozen.py | 2 +- pandas/core/indexes/multi.py | 15 ++++---- pandas/core/internals/blocks.py | 10 ++--- pandas/core/nanops.py | 2 +- pandas/core/ops.py | 2 +- pandas/core/sparse/array.py | 2 +- pandas/core/tools/datetimes.py | 10 ++--- pandas/core/window.py | 2 +- pandas/io/clipboards.py | 2 +- pandas/io/formats/console.py | 8 ++-- pandas/io/formats/terminal.py | 10 ++--- pandas/io/packers.py | 2 +- pandas/io/parsers.py | 10 ++--- pandas/io/pickle.py | 4 +- pandas/io/pytables.py | 53 +++++++++++++------------- pandas/io/sas/sas_xport.py | 2 +- pandas/io/sas/sasreader.py | 2 +- pandas/io/sql.py | 6 +-- pandas/io/stata.py | 4 +- pandas/tests/frame/test_arithmetic.py | 2 +- pandas/tests/indexing/common.py | 4 +- pandas/tests/io/formats/test_format.py | 4 +- pandas/tests/io/test_pytables.py | 8 ++-- pandas/tests/io/test_sql.py | 6 +-- pandas/tests/test_multilevel.py | 2 +- pandas/tests/test_nanops.py | 8 ++-- pandas/tests/test_panel.py | 4 +- pandas/tests/test_strings.py | 2 +- pandas/tseries/holiday.py | 6 +-- 33 files changed, 103 insertions(+), 
107 deletions(-) diff --git a/doc/source/whatsnew/v0.24.0.txt b/doc/source/whatsnew/v0.24.0.txt index 1979bde796452..2399bb0686503 100644 --- a/doc/source/whatsnew/v0.24.0.txt +++ b/doc/source/whatsnew/v0.24.0.txt @@ -737,7 +737,7 @@ Build Changes - Building pandas for development now requires ``cython >= 0.28.2`` (:issue:`21688`) - Testing pandas now requires ``hypothesis>=3.58`` (:issue:22280). You can find `the Hypothesis docs here `_, and a pandas-specific introduction :ref:`in the contributing guide ` . -- +- ci/lint.sh now supports flake8 > 3.4.1 (:issue:`18419`) Other ^^^^^ diff --git a/pandas/core/computation/pytables.py b/pandas/core/computation/pytables.py index 2bd1b0c5b3507..d169c4b7c6b0f 100644 --- a/pandas/core/computation/pytables.py +++ b/pandas/core/computation/pytables.py @@ -405,13 +405,13 @@ def visit_Assign(self, node, **kwargs): return self.visit(cmpr) def visit_Subscript(self, node, **kwargs): - # only allow simple suscripts + # only allow simple subscripts value = self.visit(node.value) slobj = self.visit(node.slice) try: value = value.value - except: + except AttributeError: pass try: diff --git a/pandas/core/dtypes/common.py b/pandas/core/dtypes/common.py index b8cbb41501dd1..0dc428f3c37bf 100644 --- a/pandas/core/dtypes/common.py +++ b/pandas/core/dtypes/common.py @@ -440,7 +440,7 @@ def is_timedelta64_dtype(arr_or_dtype): return False try: tipo = _get_dtype_type(arr_or_dtype) - except: + except Exception: return False return issubclass(tipo, np.timedelta64) diff --git a/pandas/core/dtypes/dtypes.py b/pandas/core/dtypes/dtypes.py index f53ccc86fc4ff..a8a25ab4759d5 100644 --- a/pandas/core/dtypes/dtypes.py +++ b/pandas/core/dtypes/dtypes.py @@ -344,10 +344,8 @@ def construct_from_string(cls, string): try: if string == 'category': return cls() - except: - pass - - raise TypeError("cannot construct a CategoricalDtype") + except Exception: + TypeError("cannot construct a CategoricalDtype") @staticmethod def validate_ordered(ordered): @@ -499,7 +497,7 @@ def __new__(cls, unit=None, tz=None): if m is not None: unit = m.groupdict()['unit'] tz = m.groupdict()['tz'] - except: + except Exception: raise ValueError("could not construct DatetimeTZDtype") elif isinstance(unit, compat.string_types): diff --git a/pandas/core/indexes/frozen.py b/pandas/core/indexes/frozen.py index 3c6b922178abf..9ecb7538109b7 100644 --- a/pandas/core/indexes/frozen.py +++ b/pandas/core/indexes/frozen.py @@ -136,7 +136,7 @@ def searchsorted(self, v, side='left', sorter=None): # https://github.com/numpy/numpy/issues/5370 try: v = self.dtype.type(v) - except: + except Exception: pass return super(FrozenNDArray, self).searchsorted( v, side=side, sorter=sorter) diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py index 955f1461075f9..a499bc7b34428 100644 --- a/pandas/core/indexes/multi.py +++ b/pandas/core/indexes/multi.py @@ -980,12 +980,12 @@ def _try_mi(k): return _try_mi(key) except (KeyError): raise - except: + except Exception: pass try: return _try_mi(Timestamp(key)) - except: + except Exception: pass raise InvalidIndexError(key) @@ -1640,7 +1640,7 @@ def append(self, other): # if all(isinstance(x, MultiIndex) for x in other): try: return MultiIndex.from_tuples(new_tuples, names=self.names) - except: + except TypeError: return Index(new_tuples) def argsort(self, *args, **kwargs): @@ -2269,8 +2269,7 @@ def maybe_droplevels(indexer, levels, drop_level): for i in sorted(levels, reverse=True): try: new_index = new_index.droplevel(i) - except: - + except 
ValueError: # no dropping here return orig_index return new_index @@ -2769,11 +2768,11 @@ def _convert_can_do_setop(self, other): labels=[[]] * self.nlevels, verify_integrity=False) else: - msg = 'other must be a MultiIndex or a list of tuples' try: other = MultiIndex.from_tuples(other) - except: - raise TypeError(msg) + except TypeError: + raise TypeError('other must be a MultiIndex or a list ' + 'of tuples.') else: result_names = self.names if self.names == other.names else None return other, result_names diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index e735b35653cd4..b814ce6d37a5e 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -666,7 +666,7 @@ def _astype(self, dtype, copy=False, errors='raise', values=None, newb = make_block(values, placement=self.mgr_locs, klass=klass, ndim=self.ndim) - except: + except Exception: if errors == 'raise': raise newb = self.copy() if copy else self @@ -1142,7 +1142,7 @@ def check_int_bool(self, inplace): # a fill na type method try: m = missing.clean_fill_method(method) - except: + except ValueError: m = None if m is not None: @@ -1157,7 +1157,7 @@ def check_int_bool(self, inplace): # try an interp method try: m = missing.clean_interp_method(method, **kwargs) - except: + except ValueError: m = None if m is not None: @@ -2438,7 +2438,7 @@ def set(self, locs, values, check=False): try: if (self.values[locs] == values).all(): return - except: + except Exception: pass try: self.values[locs] = values @@ -3172,7 +3172,7 @@ def _astype(self, dtype, copy=False, errors='raise', values=None, def __len__(self): try: return self.sp_index.length - except: + except Exception: return 0 def copy(self, deep=True, mgr=None): diff --git a/pandas/core/nanops.py b/pandas/core/nanops.py index 96701915abe42..2b87502ad1c5c 100644 --- a/pandas/core/nanops.py +++ b/pandas/core/nanops.py @@ -813,7 +813,7 @@ def _ensure_numeric(x): elif is_object_dtype(x): try: x = x.astype(np.complex128) - except: + except Exception: x = x.astype(np.float64) else: if not np.any(x.imag): diff --git a/pandas/core/ops.py b/pandas/core/ops.py index a86e57fd8876d..6ccd5f5407768 100644 --- a/pandas/core/ops.py +++ b/pandas/core/ops.py @@ -1546,7 +1546,7 @@ def na_op(x, y): y = bool(y) try: result = libops.scalar_binop(x, y, op) - except: + except Exception: raise TypeError("cannot compare a dtyped [{dtype}] array " "with a scalar of type [{typ}]" .format(dtype=x.dtype, diff --git a/pandas/core/sparse/array.py b/pandas/core/sparse/array.py index eb07e5ef6c85f..33302a048e2a8 100644 --- a/pandas/core/sparse/array.py +++ b/pandas/core/sparse/array.py @@ -306,7 +306,7 @@ def __setstate__(self, state): def __len__(self): try: return self.sp_index.length - except: + except Exception: return 0 def __unicode__(self): diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index 57387b9ea870a..0b9ba67e74f08 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -241,7 +241,7 @@ def _convert_listlike_datetimes(arg, box, format, name=None, tz=None, if format == '%Y%m%d': try: result = _attempt_YYYYMMDD(arg, errors=errors) - except: + except Exception: raise ValueError("cannot convert the input to " "'%Y%m%d' date format") @@ -331,7 +331,7 @@ def _adjust_to_origin(arg, origin, unit): raise ValueError("unit must be 'D' for origin='julian'") try: arg = arg - j0 - except: + except Exception: raise ValueError("incompatible 'arg' type for given " "'origin'='julian'") @@ -728,21 +728,21 @@ def 
calc_with_mask(carg, mask): # try intlike / strings that are ints try: return calc(arg.astype(np.int64)) - except: + except Exception: pass # a float with actual np.nan try: carg = arg.astype(np.float64) return calc_with_mask(carg, notna(carg)) - except: + except Exception: pass # string with NaN-like try: mask = ~algorithms.isin(arg, list(tslib.nat_strings)) return calc_with_mask(arg, mask) - except: + except Exception: pass return None diff --git a/pandas/core/window.py b/pandas/core/window.py index eed0e97f30dc9..76f2655dbed43 100644 --- a/pandas/core/window.py +++ b/pandas/core/window.py @@ -2502,7 +2502,7 @@ def _offset(window, center): offset = (window - 1) / 2. if center else 0 try: return int(offset) - except: + except ValueError: return offset.astype(int) diff --git a/pandas/io/clipboards.py b/pandas/io/clipboards.py index 0d564069c681f..8fee3befce528 100644 --- a/pandas/io/clipboards.py +++ b/pandas/io/clipboards.py @@ -42,7 +42,7 @@ def read_clipboard(sep=r'\s+', **kwargs): # pragma: no cover text, encoding=(kwargs.get('encoding') or get_option('display.encoding')) ) - except: + except Exception: pass # Excel copies into clipboard with \t separation diff --git a/pandas/io/formats/console.py b/pandas/io/formats/console.py index 45d50ea3fa073..ff6b37d4b34e9 100644 --- a/pandas/io/formats/console.py +++ b/pandas/io/formats/console.py @@ -100,7 +100,7 @@ def check_main(): try: return __IPYTHON__ or check_main() # noqa - except: + except Exception: return check_main() @@ -118,7 +118,7 @@ def in_qtconsole(): ip.config.get('IPKernelApp', {}).get('parent_appname', "")) if 'qtconsole' in front_end.lower(): return True - except: + except Exception: return False return False @@ -137,7 +137,7 @@ def in_ipnb(): ip.config.get('IPKernelApp', {}).get('parent_appname', "")) if 'notebook' in front_end.lower(): return True - except: + except Exception: return False return False @@ -149,7 +149,7 @@ def in_ipython_frontend(): try: ip = get_ipython() # noqa return 'zmq' in str(type(ip)).lower() - except: + except Exception: pass return False diff --git a/pandas/io/formats/terminal.py b/pandas/io/formats/terminal.py index dcd6f2cf4a718..cbc3b140f814d 100644 --- a/pandas/io/formats/terminal.py +++ b/pandas/io/formats/terminal.py @@ -78,7 +78,7 @@ def _get_terminal_size_windows(): h = windll.kernel32.GetStdHandle(-12) csbi = create_string_buffer(22) res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) - except: + except Exception: return None if res: import struct @@ -108,7 +108,7 @@ def _get_terminal_size_tput(): output = proc.communicate(input=None) rows = int(output[0]) return (cols, rows) - except: + except Exception: return None @@ -120,7 +120,7 @@ def ioctl_GWINSZ(fd): import struct cr = struct.unpack( 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) - except: + except Exception: return None return cr cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) @@ -129,13 +129,13 @@ def ioctl_GWINSZ(fd): fd = os.open(os.ctermid(), os.O_RDONLY) cr = ioctl_GWINSZ(fd) os.close(fd) - except: + except Exception: pass if not cr or cr == (0, 0): try: from os import environ as env cr = (env['LINES'], env['COLUMNS']) - except: + except KeyError: return None return int(cr[1]), int(cr[0]) diff --git a/pandas/io/packers.py b/pandas/io/packers.py index 7a1e72637f4ce..4a77c60bdde59 100644 --- a/pandas/io/packers.py +++ b/pandas/io/packers.py @@ -703,7 +703,7 @@ def create_block(b): dtype = dtype_for(obj[u'dtype']) try: return dtype(obj[u'data']) - except: + except Exception: return dtype.type(obj[u'data']) 
elif typ == u'np_complex': return complex(obj[u'real'] + u'+' + obj[u'imag'] + u'j') diff --git a/pandas/io/parsers.py b/pandas/io/parsers.py index 8d37bf4c84d5d..371f81c039040 100755 --- a/pandas/io/parsers.py +++ b/pandas/io/parsers.py @@ -1808,7 +1808,7 @@ def close(self): # close additional handles opened by C parser (for compression) try: self._reader.close() - except: + except Exception: pass def _set_noconvert_columns(self): @@ -3037,7 +3037,7 @@ def converter(*date_cols): errors='ignore', infer_datetime_format=infer_datetime_format ) - except: + except Exception: return tools.to_datetime( parsing.try_parse_dates(strs, dayfirst=dayfirst)) else: @@ -3266,7 +3266,7 @@ def _floatify_na_values(na_values): v = float(v) if not np.isnan(v): result.add(v) - except: + except Exception: pass return result @@ -3287,11 +3287,11 @@ def _stringify_na_values(na_values): result.append(str(v)) result.append(v) - except: + except Exception: pass try: result.append(int(x)) - except: + except Exception: pass return set(result) diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py index 6738daec9397c..28d1fe37a2122 100644 --- a/pandas/io/pickle.py +++ b/pandas/io/pickle.py @@ -168,12 +168,12 @@ def try_read(path, encoding=None): return read_wrapper( lambda f: pc.load(f, encoding=encoding, compat=False)) # compat pickle - except: + except Exception: return read_wrapper( lambda f: pc.load(f, encoding=encoding, compat=True)) try: return try_read(path) - except: + except Exception: if PY3: return try_read(path, encoding='latin1') raise diff --git a/pandas/io/pytables.py b/pandas/io/pytables.py index c57b1c3e211f6..1d04833c404f9 100644 --- a/pandas/io/pytables.py +++ b/pandas/io/pytables.py @@ -258,7 +258,7 @@ def _tables(): try: _table_file_open_policy_is_strict = ( tables.file._FILE_OPEN_POLICY == 'strict') - except: + except Exception: pass return _table_mod @@ -395,11 +395,11 @@ def read_hdf(path_or_buf, key=None, mode='r', **kwargs): 'contains multiple datasets.') key = candidate_only_group._v_pathname return store.select(key, auto_close=auto_close, **kwargs) - except: + except Exception: # if there is an error, close the store try: store.close() - except: + except Exception: pass raise @@ -517,10 +517,9 @@ def __getattr__(self, name): """ allow attribute access to get stores """ try: return self.get(name) - except: - pass - raise AttributeError("'%s' object has no attribute '%s'" % - (type(self).__name__, name)) + except Exception: + raise AttributeError("'%s' object has no attribute '%s'" % + (type(self).__name__, name)) def __contains__(self, key): """ check for existence of this key @@ -675,7 +674,7 @@ def flush(self, fsync=False): if fsync: try: os.fsync(self._handle.fileno()) - except: + except Exception: pass def get(self, key): @@ -1161,7 +1160,7 @@ def get_node(self, key): if not key.startswith('/'): key = '/' + key return self._handle.get_node(self.root, key) - except: + except Exception: return None def get_storer(self, key): @@ -1270,7 +1269,7 @@ def _validate_format(self, format, kwargs): # validate try: kwargs['format'] = _FORMAT_MAP[format.lower()] - except: + except Exception: raise TypeError("invalid HDFStore format specified [{0}]" .format(format)) @@ -1307,7 +1306,7 @@ def error(t): try: pt = _TYPE_MAP[type(value)] - except: + except KeyError: error('_TYPE_MAP') # we are actually a table @@ -1318,7 +1317,7 @@ def error(t): if u('table') not in pt: try: return globals()[_STORER_MAP[pt]](self, group, **kwargs) - except: + except Exception: error('_STORER_MAP') # existing node (and must 
be a table) @@ -1354,12 +1353,12 @@ def error(t): fields = group.table._v_attrs.fields if len(fields) == 1 and fields[0] == u('value'): tt = u('legacy_frame') - except: + except Exception: pass try: return globals()[_TABLE_MAP[tt]](self, group, **kwargs) - except: + except Exception: error('_TABLE_MAP') def _write_to_group(self, key, value, format, index=True, append=False, @@ -1624,7 +1623,7 @@ def is_indexed(self): """ return whether I am an indexed column """ try: return getattr(self.table.cols, self.cname).is_indexed - except: + except Exception: False def copy(self): @@ -1656,7 +1655,7 @@ def convert(self, values, nan_rep, encoding, errors): kwargs['name'] = _ensure_decoded(self.index_name) try: self.values = Index(values, **kwargs) - except: + except Exception: # if the output freq is different that what we recorded, # it should be None (see also 'doc example part 2') @@ -1869,7 +1868,7 @@ def create_for_block( m = re.search(r"values_block_(\d+)", name) if m: name = "values_%s" % m.groups()[0] - except: + except Exception: pass return cls(name=name, cname=cname, **kwargs) @@ -2232,7 +2231,7 @@ def convert(self, values, nan_rep, encoding, errors): try: self.data = self.data.astype(dtype, copy=False) - except: + except Exception: self.data = self.data.astype('O', copy=False) # convert nans / decode @@ -2325,7 +2324,7 @@ def set_version(self): self.version = tuple(int(x) for x in version.split('.')) if len(self.version) == 2: self.version = self.version + (0,) - except: + except Exception: self.version = (0, 0, 0) @property @@ -2769,7 +2768,7 @@ def write_array(self, key, value, items=None): else: try: items = list(items) - except: + except TypeError: pass ws = performance_doc % (inferred_type, key, items) warnings.warn(ws, PerformanceWarning, stacklevel=7) @@ -2843,7 +2842,7 @@ class SeriesFixed(GenericFixed): def shape(self): try: return len(getattr(self.group, 'values')), - except: + except TypeError: return None def read(self, **kwargs): @@ -2961,7 +2960,7 @@ def shape(self): shape = shape[::-1] return shape - except: + except Exception: return None def read(self, start=None, stop=None, **kwargs): @@ -3495,7 +3494,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None, if axes is None: try: axes = _AXES_MAP[type(obj)] - except: + except KeyError: raise TypeError("cannot properly create the storer for: " "[group->%s,value->%s]" % (self.group._v_name, type(obj))) @@ -3614,7 +3613,7 @@ def get_blk_items(mgr, blocks): b, b_items = by_items.pop(items) new_blocks.append(b) new_blk_items.append(b_items) - except: + except Exception: raise ValueError( "cannot match existing table structure for [%s] on " "appending data" % ','.join(pprint_thing(item) for @@ -3642,7 +3641,7 @@ def get_blk_items(mgr, blocks): if existing_table is not None and validate: try: existing_col = existing_table.values_axes[i] - except: + except KeyError: raise ValueError("Incompatible appended table [%s] with " "existing table [%s]" % (blocks, existing_table.values_axes)) @@ -4460,7 +4459,7 @@ def _get_info(info, name): """ get/create the info for this name """ try: idx = info[name] - except: + except KeyError: idx = info[name] = dict() return idx @@ -4782,7 +4781,7 @@ def __init__(self, table, where=None, start=None, stop=None, **kwargs): ) self.coordinates = where - except: + except Exception: pass if self.coordinates is None: diff --git a/pandas/io/sas/sas_xport.py b/pandas/io/sas/sas_xport.py index 14e7ad9682db6..993af716f7037 100644 --- a/pandas/io/sas/sas_xport.py +++ b/pandas/io/sas/sas_xport.py @@ 
-246,7 +246,7 @@ def __init__(self, filepath_or_buffer, index=None, encoding='ISO-8859-1', contents = filepath_or_buffer.read() try: contents = contents.encode(self._encoding) - except: + except Exception: pass self.filepath_or_buffer = compat.BytesIO(contents) diff --git a/pandas/io/sas/sasreader.py b/pandas/io/sas/sasreader.py index b8a0bf5733158..0478e1ce8bb82 100644 --- a/pandas/io/sas/sasreader.py +++ b/pandas/io/sas/sasreader.py @@ -46,7 +46,7 @@ def read_sas(filepath_or_buffer, format=None, index=None, encoding=None, format = "sas7bdat" else: raise ValueError("unable to infer format of SAS file") - except: + except Exception: pass if format.lower() == 'xport': diff --git a/pandas/io/sql.py b/pandas/io/sql.py index a582d32741ae9..c34d216f17f1e 100644 --- a/pandas/io/sql.py +++ b/pandas/io/sql.py @@ -382,7 +382,7 @@ def read_sql(sql, con, index_col=None, coerce_float=True, params=None, try: _is_table_name = pandas_sql.has_table(sql) - except: + except Exception: _is_table_name = False if _is_table_name: @@ -847,7 +847,7 @@ def _sqlalchemy_type(self, col): try: tz = col.tzinfo # noqa return DateTime(timezone=True) - except: + except Exception: return DateTime if col_type == 'timedelta64': warnings.warn("the 'timedelta' type is not supported, and will be " @@ -1360,7 +1360,7 @@ def run_transaction(self): try: yield cur self.con.commit() - except: + except Exception: self.con.rollback() raise finally: diff --git a/pandas/io/stata.py b/pandas/io/stata.py index efd5f337fdf69..66b66e1f3887e 100644 --- a/pandas/io/stata.py +++ b/pandas/io/stata.py @@ -1252,12 +1252,12 @@ def _read_old_header(self, first_char): try: self.typlist = [self.TYPE_MAP[typ] for typ in typlist] - except: + except KeyError: raise ValueError("cannot convert stata types [{0}]" .format(','.join(str(x) for x in typlist))) try: self.dtyplist = [self.DTYPE_MAP[typ] for typ in typlist] - except: + except KeyError: raise ValueError("cannot convert stata dtypes [{0}]" .format(','.join(str(x) for x in typlist))) diff --git a/pandas/tests/frame/test_arithmetic.py b/pandas/tests/frame/test_arithmetic.py index f142f770a0c54..4c77309a555ce 100644 --- a/pandas/tests/frame/test_arithmetic.py +++ b/pandas/tests/frame/test_arithmetic.py @@ -214,7 +214,7 @@ def test_arith_flex_frame(self): dtype = dict(C=None) tm.assert_frame_equal(result, exp) _check_mixed_int(result, dtype=dtype) - except: + except Exception: printing.pprint_thing("Failing operation %r" % op) raise diff --git a/pandas/tests/indexing/common.py b/pandas/tests/indexing/common.py index cbf1bdbce9574..6fb1f315e1cc9 100644 --- a/pandas/tests/indexing/common.py +++ b/pandas/tests/indexing/common.py @@ -157,7 +157,7 @@ def get_result(self, obj, method, key, axis): with catch_warnings(record=True): try: xp = getattr(obj, method).__getitem__(_axify(obj, key, axis)) - except: + except Exception: xp = getattr(obj, method).__getitem__(key) return xp @@ -219,7 +219,7 @@ def _print(result, error=None): try: xp = self.get_result(obj, method2, k2, a) - except: + except Exception: result = 'no comp' _print(result) return diff --git a/pandas/tests/io/formats/test_format.py b/pandas/tests/io/formats/test_format.py index c19f8e57f9ae7..0650f912def0f 100644 --- a/pandas/tests/io/formats/test_format.py +++ b/pandas/tests/io/formats/test_format.py @@ -70,7 +70,7 @@ def has_horizontally_truncated_repr(df): try: # Check header row fst_line = np.array(repr(df).splitlines()[0].split()) cand_col = np.where(fst_line == '...')[0][0] - except: + except Exception: return False # Make sure each row 
has this ... in the same place r = repr(df) @@ -452,7 +452,7 @@ def test_to_string_repr_unicode(self): for line in rs[1:]: try: line = line.decode(get_option("display.encoding")) - except: + except Exception: pass if not line.startswith('dtype:'): assert len(line) == line_len diff --git a/pandas/tests/io/test_pytables.py b/pandas/tests/io/test_pytables.py index ddcfcc0842d1a..3ee90323afb3b 100644 --- a/pandas/tests/io/test_pytables.py +++ b/pandas/tests/io/test_pytables.py @@ -47,7 +47,7 @@ def safe_remove(path): if path is not None: try: os.remove(path) - except: + except Exception: pass @@ -55,7 +55,7 @@ def safe_close(store): try: if store is not None: store.close() - except: + except Exception: pass @@ -113,7 +113,7 @@ def _maybe_remove(store, key): no content from previous tests using the same table name.""" try: store.remove(key) - except: + except Exception: pass @@ -4590,7 +4590,7 @@ def do_copy(f, new_f=None, keys=None, safe_close(tstore) try: os.close(fd) - except: + except Exception: pass safe_remove(new_f) diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py index 824e5a2b23df3..acf7fb24162eb 100644 --- a/pandas/tests/io/test_sql.py +++ b/pandas/tests/io/test_sql.py @@ -1783,7 +1783,7 @@ def test_read_procedure(self): try: r1 = connection.execute(proc) # noqa trans.commit() - except: + except Exception: trans.rollback() raise @@ -2363,7 +2363,7 @@ def setup_class(cls): # No real user should allow root access with a blank password. pymysql.connect(host='localhost', user='root', passwd='', db='pandas_nosetest') - except: + except Exception: pass else: return @@ -2390,7 +2390,7 @@ def setup_method(self, request, datapath): # No real user should allow root access with a blank password. self.conn = pymysql.connect(host='localhost', user='root', passwd='', db='pandas_nosetest') - except: + except Exception: pass else: return diff --git a/pandas/tests/test_multilevel.py b/pandas/tests/test_multilevel.py index dcfeab55f94fc..494f89ce0974f 100644 --- a/pandas/tests/test_multilevel.py +++ b/pandas/tests/test_multilevel.py @@ -1358,7 +1358,7 @@ def f(): try: df = f() - except: + except Exception: pass assert (df['foo', 'one'] == 0).all() diff --git a/pandas/tests/test_nanops.py b/pandas/tests/test_nanops.py index a70ee80aee180..75cd2a1b3635a 100644 --- a/pandas/tests/test_nanops.py +++ b/pandas/tests/test_nanops.py @@ -141,12 +141,12 @@ def _coerce_tds(targ, res): if axis != 0 and hasattr( targ, 'shape') and targ.ndim and targ.shape != res.shape: res = np.split(res, [targ.shape[0]], axis=0)[0] - except: + except Exception: targ, res = _coerce_tds(targ, res) try: tm.assert_almost_equal(targ, res, check_dtype=check_dtype) - except: + except AssertionError: # handle timedelta dtypes if hasattr(targ, 'dtype') and targ.dtype == 'm8[ns]': @@ -167,11 +167,11 @@ def _coerce_tds(targ, res): else: try: res = res.astype('c16') - except: + except Exception: res = res.astype('f8') try: targ = targ.astype('c16') - except: + except Exception: targ = targ.astype('f8') # there should never be a case where numpy returns an object # but nanops doesn't, so make that an exception diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py index b968c52ce3dfd..0fc71465faf11 100644 --- a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -337,13 +337,13 @@ def check_op(op, name): for op in ops: try: check_op(getattr(operator, op), op) - except: + except Exception: pprint_thing("Failing operation: %r" % op) raise if compat.PY3: try: check_op(operator.truediv, 'div') - 
except: + except Exception: pprint_thing("Failing operation: %r" % 'div') raise diff --git a/pandas/tests/test_strings.py b/pandas/tests/test_strings.py index ab508174fa4a9..b11f2ee18f0de 100644 --- a/pandas/tests/test_strings.py +++ b/pandas/tests/test_strings.py @@ -2708,7 +2708,7 @@ def test_slice(self): expected = Series([s[start:stop:step] if not isna(s) else NA for s in values]) tm.assert_series_equal(result, expected) - except: + except AssertionError: print('failed on %s:%s:%s' % (start, stop, step)) raise diff --git a/pandas/tseries/holiday.py b/pandas/tseries/holiday.py index 33dcf6d64b302..a8a05b88c0efb 100644 --- a/pandas/tseries/holiday.py +++ b/pandas/tseries/holiday.py @@ -292,7 +292,7 @@ def _apply_rule(self, dates): def register(cls): try: name = cls.name - except: + except Exception: name = cls.__name__ holiday_calendars[name] = cls @@ -424,7 +424,7 @@ def merge_class(base, other): """ try: other = other.rules - except: + except Exception: pass if not isinstance(other, list): @@ -433,7 +433,7 @@ def merge_class(base, other): try: base = base.rules - except: + except Exception: pass if not isinstance(base, list): From c251daee88712e405ceccccab7a93727ec633394 Mon Sep 17 00:00:00 2001 From: Aaron Critchley Date: Fri, 7 Sep 2018 00:54:47 +0100 Subject: [PATCH 4/4] Removing leftover from CI test --- pandas/core/indexes/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index aa4594355a965..710c9d0e296c9 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -155,7 +155,6 @@ def index_arithmetic_method(self, other): name = '__{name}__'.format(name=op.__name__) # TODO: docstring? - foo = 'bar' # Never commit, just testing CI return set_function_name(index_arithmetic_method, name, cls)
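
The refactor applied in every hunk above is the same: a bare "except:" (the pattern flagged by flake8 as E722 and tracked in issue 18419) is narrowed either to the specific exception the guarded statement is known to raise, or to "except Exception:" where the failure mode cannot be pinned down. The distinction matters because a bare except also catches SystemExit, KeyboardInterrupt and GeneratorExit, so it can swallow Ctrl-C and interpreter shutdown along with genuine bugs. Below is a minimal before/after sketch of the pattern; it uses a hypothetical config-parsing helper, not any pandas code from the patches.

    import json

    def parse_port_before(raw):
        # Before: the bare except silences typos, Ctrl-C and real bugs alike.
        try:
            return int(json.loads(raw)["port"])
        except:                  # noqa: E722 -- the pattern this series removes
            return None

    def parse_port_after(raw):
        # After: catch only what the body can actually raise, and keep the
        # explicit 'raise' -- constructing an exception without raising it
        # is a silent no-op.
        try:
            return int(json.loads(raw)["port"])
        except (ValueError, KeyError, TypeError):
            raise ValueError("no usable 'port' entry in: %r" % raw)

    if __name__ == "__main__":
        print(parse_port_before("not json"))      # None -- the error is eaten
        try:
            parse_port_after("not json")
        except ValueError as err:
            print("narrowed except surfaced the problem:", err)

As in the hunks above, the narrowest type is used wherever the failing call is known (AttributeError around getattr chains, KeyError around dict/_TYPE_MAP lookups, ValueError around parsing and droplevel), while "except Exception:" is kept only where the set of possible failures is genuinely open-ended, as in the pickle-compat and PyTables shims.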