-
-
Notifications
You must be signed in to change notification settings - Fork 18.5k
[WIP] Test (and more fixes) for duplicate indices with concat #38745
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
c6f1677
19c95f0
bb33098
11378a8
ca316ce
e59bef2
f176ad3
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,6 +5,8 @@ | |
import numpy as np | ||
import pytest | ||
|
||
from pandas.errors import InvalidIndexError | ||
|
||
import pandas as pd | ||
from pandas import DataFrame, Index, MultiIndex, Series, concat, date_range | ||
import pandas._testing as tm | ||
|
@@ -445,6 +447,57 @@ def test_concat_ordered_dict(self): | |
tm.assert_series_equal(result, expected) | ||
|
||
|
||
@pytest.mark.parametrize("join", ["inner", "outer"]) | ||
def test_concat_duplicates_error(index, join): | ||
# https://github.com/pandas-dev/pandas/issues/6963 | ||
# Needs an index with 4 unique values | ||
index = index.unique() | ||
if len(index) < 4: | ||
pytest.skip() | ||
|
||
index_unique = index[:4] | ||
index_non_unique = index_unique[[0, 0, 1, 2, 3]] | ||
|
||
df_non_unique = DataFrame( | ||
np.ones((1, len(index_non_unique))), columns=index_non_unique | ||
) | ||
df_unique = DataFrame(np.ones((1, len(index_unique))), columns=index_unique) | ||
|
||
with pytest.raises(InvalidIndexError): | ||
_ = pd.concat([df_non_unique, df_unique], join=join) | ||
|
||
|
||
@pytest.mark.xfail(reason="Not implemented") | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. is there an issue for this? what is this case? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I was expecting this to be allowed: import pandas as pd
result = pd.concat(
[
pd.Series(0, index=[0, 0, 1, 2]),
pd.Series(1, index=[1, 2]),
],
join="inner",
)
expected = pd.DataFrame({0: [0, 0], 1: [1, 1]}, index=[1, 2])
pd.testing.assert_frame_equal(result, expected) Because the intersection of those indices is well defined. However, it turns out this does not work, and also doesn't work in 1.1.5. I sort of opened this issue here: #38773, but that was a more low-level issue. |
||
def test_concat_intersection_duplicates(index):
    """Inner-join concat should be valid when the *intersection* of the
    column indexes is unique, even though one index itself has duplicates.

    Failing: https://github.com/pandas-dev/pandas/pull/38745/files#r549577521
    """
    # The construction below needs at least 4 distinct labels.
    index = index.unique()
    if len(index) < 4:
        pytest.skip("requires an index with at least 4 unique values")

    index_unique = index[[0, 1, 2]]
    # Only the last label — which is NOT part of the intersection — is
    # duplicated, so the intersection of the two column indexes is unique.
    index_non_unique = index[[1, 2, 3, 3]]

    df_unique = DataFrame(
        np.ones((1, len(index_unique))),
        columns=index_unique,
    )
    df_non_unique = DataFrame(
        np.zeros((1, len(index_non_unique))),
        columns=index_non_unique,
    )

    result = pd.concat([df_unique, df_non_unique], join="inner")
    # The shared labels index[[1, 2]] survive; row labels 0 from each frame.
    expected = DataFrame(
        [[1, 1], [0, 0]],
        columns=index[[1, 2]],
        index=[0, 0],
    )

    tm.assert_frame_equal(result, expected)
|
||
|
||
@pytest.mark.parametrize("pdt", [Series, pd.DataFrame]) | ||
@pytest.mark.parametrize("dt", np.sctypes["float"]) | ||
def test_concat_no_unnecessary_upcast(dt, pdt): | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
add some comments here as it is non-obvious what is happening
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
How about this?