Skip to content

Commit

Permalink
BUG: fillna called with Series should be analogous to with dict, close #1486
Browse files Browse the repository at this point in the history
  • Loading branch information
wesm committed Jun 19, 2012
1 parent 7fb453a commit 7bc6455
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 2 deletions.
1 change: 1 addition & 0 deletions RELEASE.rst
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ pandas 0.8.0
- Reset index mapping when grouping Series in Cython (#1423)
- Fix outer/inner DataFrame.join with non-unique indexes (#1421)
- Fix MultiIndex groupby bugs with empty lower levels (#1401)
- Calling fillna with a Series will have the same behavior as with a dict (#1486)

pandas 0.7.3
============
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -2749,7 +2749,7 @@ def fillna(self, value=None, method='pad', axis=0, inplace=False,
# Float type values
if len(self.columns) == 0:
return self
if isinstance(value, dict):
if isinstance(value, (dict, Series)):
result = self if inplace else self.copy()
for k, v in value.iteritems():
if k not in result:
Expand Down
7 changes: 6 additions & 1 deletion pandas/tests/test_frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -3848,7 +3848,7 @@ def test_fillna_inplace(self):
self.assert_(df2 is df)
assert_frame_equal(df2, expected)

def test_fillna_dict(self):
def test_fillna_dict_series(self):
df = DataFrame({'a': [nan, 1, 2, nan, nan],
'b': [1, 2, 3, nan, nan],
'c': [nan, 1, 2, 3, 4]})
Expand All @@ -3863,6 +3863,11 @@ def test_fillna_dict(self):
# it works
result = df.fillna({'a': 0, 'b': 5, 'd' : 7})

# Series treated same as dict
result = df.fillna(df.max())
expected = df.fillna(df.max().to_dict())
assert_frame_equal(result, expected)

def test_fillna_columns(self):
df = DataFrame(np.random.randn(10, 10))
df.values[:, ::2] = np.nan
Expand Down

0 comments on commit 7bc6455

Please sign in to comment.