@@ -115,11 +115,12 @@ def _infer_concat_order_from_coords(datasets):
     return combined_ids, concat_dims
 
 
-def _check_shape_tile_ids(combined_tile_ids):
+def _check_dimension_depth_tile_ids(combined_tile_ids):
+    """
+    Check all tuples are the same length, i.e. check that all lists are
+    nested to the same depth.
+    """
     tile_ids = combined_tile_ids.keys()
-
-    # Check all tuples are the same length
-    # i.e. check that all lists are nested to the same depth
     nesting_depths = [len(tile_id) for tile_id in tile_ids]
     if not nesting_depths:
         nesting_depths = [0]
@@ -128,8 +129,13 @@ def _check_shape_tile_ids(combined_tile_ids):
             "The supplied objects do not form a hypercube because"
             " sub-lists do not have consistent depths"
         )
+    # return these just to be reused in _check_shape_tile_ids
+    return tile_ids, nesting_depths
 
-    # Check all lists along one dimension are same length
+
+def _check_shape_tile_ids(combined_tile_ids):
+    """Check all lists along one dimension are same length."""
+    tile_ids, nesting_depths = _check_dimension_depth_tile_ids(combined_tile_ids)
     for dim in range(nesting_depths[0]):
         indices_along_dim = [tile_id[dim] for tile_id in tile_ids]
         occurrences = Counter(indices_along_dim)
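
For reference, a minimal standalone sketch of the logic split across these two helpers, run on hypothetical tile-id mappings. `check_depth` and `check_shape` are simplified stand-ins written for illustration, not xarray's private helpers themselves, and the tile-id dictionaries are toy values:

```python
from collections import Counter

# Toy {tile_id: dataset} mappings; the string values are placeholders.
complete = {(0, 0): "ds00", (0, 1): "ds01", (1, 0): "ds10", (1, 1): "ds11"}
ragged = {(0, 0): "ds00", (0, 1): "ds01", (1, 0): "ds10"}  # (1, 1) missing


def check_depth(combined_tile_ids):
    """All tile-id tuples must have the same length (same nesting depth)."""
    tile_ids = combined_tile_ids.keys()
    nesting_depths = [len(tile_id) for tile_id in tile_ids] or [0]
    if len(set(nesting_depths)) != 1:
        raise ValueError("sub-lists do not have consistent depths")
    return tile_ids, nesting_depths


def check_shape(combined_tile_ids):
    """Additionally require the same number of tiles along every dimension."""
    tile_ids, nesting_depths = check_depth(combined_tile_ids)
    for dim in range(nesting_depths[0]):
        occurrences = Counter(tile_id[dim] for tile_id in tile_ids)
        if len(set(occurrences.values())) != 1:
            raise ValueError(f"inconsistent lengths along dimension {dim}")


check_shape(complete)  # passes: tiles form a full 2x2 hypercube
check_depth(ragged)    # passes: only the nesting depth is checked
try:
    check_shape(ragged)  # raises: one corner of the hypercube is missing
except ValueError as err:
    print("not a hypercube:", err)
```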
@@ -536,7 +542,8 @@ def combine_by_coords(
     coords : {'minimal', 'different', 'all' or list of str}, optional
         As per the 'data_vars' kwarg, but for coordinate variables.
     fill_value : scalar, optional
-        Value to use for newly missing values
+        Value to use for newly missing values. If None, raises a ValueError if
+        the passed Datasets do not create a complete hypercube.
     join : {'outer', 'inner', 'left', 'right', 'exact'}, optional
         String indicating how to combine differing indexes
         (excluding concat_dim) in objects
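
The documented `fill_value=None` behaviour can be exercised directly. A hedged sketch (the `tile` helper and its values are made up, and it assumes an xarray version that includes this change) showing datasets that do not tile a complete hypercube being rejected:

```python
import numpy as np
import xarray as xr


def tile(ys, xs):
    # Hypothetical helper: one 2x2 tile of a larger (y, x) grid.
    return xr.Dataset(
        {"temperature": (("y", "x"), np.random.rand(2, 2))},
        coords={"y": ys, "x": xs},
    )


# Three tiles covering an L-shaped region; the (y=[2, 3], x=[2, 3]) corner
# is missing, so the tiles do not form a complete hypercube.
tiles = [tile([0, 1], [0, 1]), tile([0, 1], [2, 3]), tile([2, 3], [0, 1])]

try:
    xr.combine_by_coords(tiles, fill_value=None)
except ValueError as err:
    print("refused to combine:", err)
```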
@@ -653,6 +660,15 @@ def combine_by_coords(
         temperature    (y, x) float64 1.654 10.63 7.015 2.543 ... 12.46 2.22 15.96
         precipitation  (y, x) float64 0.2136 0.9974 0.7603 ... 0.6125 0.4654 0.5953
 
+    >>> xr.combine_by_coords([x1, x2, x3])
+    <xarray.Dataset>
+    Dimensions:        (x: 6, y: 4)
+    Coordinates:
+    * x              (x) int64 10 20 30 40 50 60
+    * y              (y) int64 0 1 2 3
+    Data variables:
+        temperature    (y, x) float64 1.654 10.63 7.015 nan ... 12.46 2.22 15.96
+        precipitation  (y, x) float64 0.2136 0.9974 0.7603 ... 0.6125 0.4654 0.5953
     """
 
     # Group by data vars
@@ -667,7 +683,13 @@ def combine_by_coords(
             list(datasets_with_same_vars)
         )
 
-        _check_shape_tile_ids(combined_ids)
+        if fill_value is None:
+            # check that datasets form complete hypercube
+            _check_shape_tile_ids(combined_ids)
+        else:
+            # check only that all datasets have same dimension depth for these
+            # vars
+            _check_dimension_depth_tile_ids(combined_ids)
 
         # Concatenate along all of concat_dims one by one to create single ds
         concatenated = _combine_nd(
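
This branch is the run-time counterpart of the docstring change: with the default `fill_value`, only the nesting-depth check runs, so the L-shaped set of tiles from the previous sketch combines successfully and the missing corner is filled with NaN. Again a hedged sketch, assuming an xarray version that includes this change:

```python
import numpy as np
import xarray as xr


# Same hypothetical L-shaped tiling as in the previous sketch.
def tile(ys, xs):
    return xr.Dataset(
        {"temperature": (("y", "x"), np.random.rand(2, 2))},
        coords={"y": ys, "x": xs},
    )


tiles = [tile([0, 1], [0, 1]), tile([0, 1], [2, 3]), tile([2, 3], [0, 1])]

# Default fill_value: only the dimension-depth check runs, and the gap left
# by the missing tile is filled with NaN by the outer join.
combined = xr.combine_by_coords(tiles)
print(combined.temperature.sel(y=[2, 3], x=[2, 3]).isnull().all().item())  # True
```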