
BUG: Possibly invalidate the item_cache when numpy implicitly converts a view to a copy #3977

Closed · wants to merge 1 commit

doc/source/release.rst (1 addition, 1 deletion)
@@ -287,7 +287,7 @@ pandas 0.12
- Fixed insertion issue into DataFrame, after rename (:issue:`4032`)
- Fixed testing issue where too many sockets were open thus leading to a
connection reset issue (:issue:`3982`, :issue:`3985`)

- Possibly invalidate the item_cache when numpy implicitly converts a view to a copy (:issue:`3970`)

pandas 0.11.0
=============

pandas/core/generic.py (3 additions, 0 deletions)
@@ -1,5 +1,6 @@
# pylint: disable=W0231,E1101

import weakref
import numpy as np

from pandas.core.index import MultiIndex
@@ -666,6 +667,7 @@ def _get_item_cache(self, item):
values = self._data.get(item)
res = self._box_item_values(item, values)
cache[item] = res
res._cacher = weakref.ref(self)
return res

def _box_item_values(self, key, values):
@@ -1065,6 +1067,7 @@ def take(self, indices, axis=0, convert=True):
new_data = self._data.reindex_axis(new_items, axis=0)
else:
new_data = self._data.take(indices, axis=axis, verify=False)

return self._constructor(new_data)

def tz_convert(self, tz, axis=0, copy=True):
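
The heart of the change is in `_get_item_cache`: when a boxed item is handed out and cached, it now also carries a weak reference back to the object that cached it, so a later write can reach back and invalidate that cache. A minimal sketch of the pattern outside pandas (the `Parent`/`Child` classes below are illustrative stand-ins, not pandas API):

```python
import weakref


class Child(object):
    """Stand-in for the boxed value returned by _box_item_values."""
    def __init__(self, key):
        self.key = key
        self._cacher = None


class Parent(object):
    """Stand-in for a DataFrame that caches the items it hands out."""
    def __init__(self):
        self._item_cache = {}

    def _clear_item_cache(self):
        self._item_cache.clear()

    def get_item(self, key):
        # mirror of _get_item_cache: box the value, cache it, and record
        # a weak reference back to the parent so the cache can later be
        # invalidated without creating a reference cycle
        res = self._item_cache.get(key)
        if res is None:
            res = Child(key)
            self._item_cache[key] = res
            res._cacher = weakref.ref(self)
        return res


p = Parent()
c = p.get_item("bb")
assert c._cacher() is p         # dereferencing the weakref yields the parent
assert p.get_item("bb") is c    # a second lookup hits the cache
```

Using `weakref.ref` rather than a plain attribute keeps the cached child from keeping its parent alive; if the parent goes away, the reference simply dereferences to `None`.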

pandas/core/indexing.py (9 additions, 0 deletions)
@@ -184,6 +184,14 @@ def setter(item, v):
if np.prod(values.shape):
values[indexer] = value

# we might need to invalidate a cached version of myself
cacher = getattr(self.obj,'_cacher',None)
if cacher is not None:
try:
cacher()._clear_item_cache()
except:
pass

def _align_series(self, indexer, ser):
# indexer to assign Series can be tuple or scalar
if isinstance(indexer, tuple):
@@ -709,6 +717,7 @@ def _getbool_axis(self, key, axis=0):
return self.obj.take(inds, axis=axis, convert=False)
except (Exception), detail:
raise self._exception(detail)

def _get_slice_axis(self, slice_obj, axis=0):
""" this is pretty simple as we just have to deal with labels """
obj = self.obj
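
The hook in `setter` is the other half: after the values have been assigned through the indexer, the object dereferences its `_cacher` weakref and asks the parent to clear its item cache; the bare `except` covers the case where the parent is already gone or never set one up. A small self-contained sketch of that defensive dereference (the `Owner`/`CachedItem` names are illustrative, not pandas internals):

```python
import weakref


class Owner(object):
    """Stand-in for the DataFrame holding the item cache."""
    def __init__(self):
        self._item_cache = {'bb': object()}

    def _clear_item_cache(self):
        self._item_cache.clear()


class CachedItem(object):
    """Stand-in for a cached, boxed value."""
    pass


owner = Owner()
item = CachedItem()
item._cacher = weakref.ref(owner)

# mirror of the setter hook: after writing through the indexer,
# invalidate the parent's cache if the parent is still reachable
cacher = getattr(item, '_cacher', None)
if cacher is not None:
    parent = cacher()          # calling the weakref yields the referent or None
    if parent is not None:
        parent._clear_item_cache()

assert owner._item_cache == {}

del owner, parent              # drop the remaining strong references
assert cacher() is None        # on CPython the owner is collected right away,
                               # which is why the pandas hook swallows errors here
```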

pandas/core/internals.py (1 addition, 0 deletions)
@@ -1650,6 +1650,7 @@ def _consolidate_inplace(self):
self._known_consolidated = True

def get(self, item):

if self.items.is_unique:
_, block = self._find_block(item)
return block.get(item)

pandas/tests/test_indexing.py (12 additions, 1 deletion)
@@ -1066,7 +1066,18 @@ def test_iloc_non_unique_indexing(self):
result = df2.loc[idx]
assert_frame_equal(result, expected)


def test_series_iloc(self):
# GH 3970

df = DataFrame({ "aa":range(5), "bb":[2.2]*5})
df["cc"] = 0.0
ck = [True]*len(df)
df["bb"].loc[0] = .13 # works
df_tmp = df.iloc[ck]
df["bb"].loc[0] = .15 # doesn't work
expected = DataFrame({ "aa":range(5), "bb":[0.15,2.2,2.2,2.2,2.2], "cc": 0.0 })
assert_frame_equal(df, expected)
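
The test follows the report in GH 3970: `df["bb"]` hands back a Series that the frame caches, and the boolean `df.iloc[ck]` goes through `take` (the `_getbool_axis` path touched above). Somewhere along that path numpy ends up working on a copy rather than the view the cached Series was built on, so without the cache invalidation the second `df["bb"].loc[0] = .15` writes into a stale object and never reaches `df`. The underlying view-versus-copy behaviour can be seen directly at the numpy level (a small illustrative sketch, independent of pandas):

```python
import numpy as np

arr = np.arange(5, dtype=float)

sliced = arr[1:4]        # basic slicing returns a view
sliced[0] = 99.0
assert arr[1] == 99.0    # the write is visible through the original array

mask = np.ones(5, dtype=bool)
taken = arr[mask]        # boolean (fancy) indexing always returns a copy
taken[0] = -1.0
assert arr[0] == 0.0     # the original array is untouched
```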

if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],