Commit 178a352
DOC: fix EX03 errors in docstrings - pandas.io.json.build_table_schema, pandas.read_stata, pandas.plotting.scatter_matrix, pandas.Index.droplevel, pandas.Grouper (#56880)
Co-authored-by: Marc Garcia <[email protected]>
Parent: de723d2

6 files changed (+7, -14 lines)

ci/code_checks.sh

Lines changed: 0 additions & 7 deletions
@@ -80,16 +80,9 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
         pandas.errors.SpecificationError \
         pandas.errors.UndefinedVariableError \
         pandas.read_json \
-        pandas.io.json.build_table_schema \
         pandas.io.formats.style.Styler.to_latex \
         pandas.read_parquet \
         pandas.DataFrame.to_sql \
-        pandas.read_stata \
-        pandas.plotting.scatter_matrix \
-        pandas.Index.droplevel \
-        pandas.MultiIndex.names \
-        pandas.MultiIndex.droplevel \
-        pandas.Grouper \
         pandas.io.formats.style.Styler.map \
         pandas.io.formats.style.Styler.apply_index \
         pandas.io.formats.style.Styler.map_index \
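
For context, EX03 is the validate_docstrings error code for flake8 failures inside doctest examples (e.g. E231, missing whitespace after a comma). Dropping these entries from the ignore list means the listed objects must now pass that check. In miniature, the kind of thing it flags (the names are taken from the scatter_matrix hunk further down):

columns = ['A','B','C','D']      # flagged: E231, missing whitespace after ','
columns = ['A', 'B', 'C', 'D']   # clean; this is the scatter_matrix fix below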

pandas/core/groupby/grouper.py

Lines changed: 2 additions & 2 deletions
@@ -151,8 +151,8 @@ class Grouper:
     Specify a resample operation on the column 'Publish date'
 
     >>> df = pd.DataFrame(
-    ...    {
-    ...        "Publish date": [
+    ...     {
+    ...         "Publish date": [
     ...             pd.Timestamp("2000-01-02"),
     ...             pd.Timestamp("2000-01-02"),
     ...             pd.Timestamp("2000-01-09"),

pandas/core/indexes/base.py

Lines changed: 1 addition & 1 deletion
@@ -2124,7 +2124,7 @@ def droplevel(self, level: IndexLabel = 0):
         Examples
         --------
         >>> mi = pd.MultiIndex.from_arrays(
-        ... [[1, 2], [3, 4], [5, 6]], names=['x', 'y', 'z'])
+        ...     [[1, 2], [3, 4], [5, 6]], names=['x', 'y', 'z'])
         >>> mi
         MultiIndex([(1, 3, 5),
                     (2, 4, 6)],
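
Again a whitespace-only change. For reference, the droplevel call this doctest builds up to, with expected results noted in comments:

import pandas as pd

mi = pd.MultiIndex.from_arrays(
    [[1, 2], [3, 4], [5, 6]], names=['x', 'y', 'z'])
print(mi.droplevel('z'))         # MultiIndex([(1, 3), (2, 4)], names=['x', 'y'])
print(mi.droplevel(['x', 'y']))  # one level left, so a flat Index([5, 6], name='z')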

pandas/io/json/_table_schema.py

Lines changed: 1 addition & 1 deletion
@@ -277,7 +277,7 @@ def build_table_schema(
     ...     {'A': [1, 2, 3],
     ...      'B': ['a', 'b', 'c'],
     ...      'C': pd.date_range('2016-01-01', freq='d', periods=3),
-    ...     }, index=pd.Index(range(3), name='idx'))
+    ...      }, index=pd.Index(range(3), name='idx'))
     >>> build_table_schema(df)
     {'fields': \
     [{'name': 'idx', 'type': 'integer'}, \
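
For reference, the surrounding doctest as a standalone script; the keys printed below match the Table Schema layout the docstring's output shows:

import pandas as pd
from pandas.io.json import build_table_schema

df = pd.DataFrame(
    {'A': [1, 2, 3],
     'B': ['a', 'b', 'c'],
     'C': pd.date_range('2016-01-01', freq='d', periods=3),
     }, index=pd.Index(range(3), name='idx'))
schema = build_table_schema(df)
print(schema['primaryKey'])                   # ['idx']
print([f['name'] for f in schema['fields']])  # ['idx', 'A', 'B', 'C']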

pandas/io/stata.py

Lines changed: 2 additions & 2 deletions
@@ -176,7 +176,7 @@
     Creating a dummy stata for this example
 
     >>> df = pd.DataFrame({{'animal': ['falcon', 'parrot', 'falcon', 'parrot'],
-    ...                    'speed': [350, 18, 361, 15]}})  # doctest: +SKIP
+    ...                     'speed': [350, 18, 361, 15]}})  # doctest: +SKIP
     >>> df.to_stata('animals.dta')  # doctest: +SKIP
 
     Read a Stata dta file:
@@ -189,7 +189,7 @@
     >>> df = pd.DataFrame(values, columns=["i"])  # doctest: +SKIP
     >>> df.to_stata('filename.dta')  # doctest: +SKIP
 
-    >>> with pd.read_stata('filename.dta', chunksize=10000) as itr: # doctest: +SKIP
+    >>> with pd.read_stata('filename.dta', chunksize=10000) as itr:  # doctest: +SKIP
     >>> for chunk in itr:
     ...     # Operate on a single chunk, e.g., chunk.mean()
     ...     pass  # doctest: +SKIP
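
The doctest carries +SKIP markers because it touches the filesystem. A self-contained version that writes the file first so the chunked read actually runs (file name, row count, and chunk size are illustrative):

import numpy as np
import pandas as pd

# Create a dummy .dta file for the chunked read to consume.
values = np.random.default_rng(0).normal(size=10_000)
df = pd.DataFrame(values, columns=["i"])
df.to_stata("filename.dta")

# chunksize makes read_stata return an iterator of DataFrames
# instead of loading the whole file at once.
with pd.read_stata("filename.dta", chunksize=1_000) as itr:
    for chunk in itr:
        pass  # operate on a single chunk, e.g., chunk.mean()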

pandas/plotting/_misc.py

Lines changed: 1 addition & 1 deletion
@@ -204,7 +204,7 @@ def scatter_matrix(
     .. plot::
         :context: close-figs
 
-        >>> df = pd.DataFrame(np.random.randn(1000, 4), columns=['A','B','C','D'])
+        >>> df = pd.DataFrame(np.random.randn(1000, 4), columns=['A', 'B', 'C', 'D'])
         >>> pd.plotting.scatter_matrix(df, alpha=0.2)
         array([[<Axes: xlabel='A', ylabel='A'>, <Axes: xlabel='B', ylabel='A'>,
                 <Axes: xlabel='C', ylabel='A'>, <Axes: xlabel='D', ylabel='A'>],
