@@ -163,7 +163,9 @@
             for obs_name, log_like in log_likelihood_dict.items():
                 shape = (self.nchains, self.ndraws) + log_like.shape[1:]
                 data[obs_name] = np.reshape(log_like.copy(), shape)
-        return dict_to_dataset(data, library=self.numpyro, dims=self.dims, coords=self.coords)
+        return dict_to_dataset(
+            data, library=self.numpyro, dims=self.dims, coords=self.coords, skip_event_dims=True
+        )

     def translate_posterior_predictive_dict_to_xarray(self, dct, dims):
         """Convert posterior_predictive or prediction samples to xarray."""

@@ -220,9 +220,19 @@
         )

         return (
-            dict_to_dataset(data, library=self.cmdstanpy, coords=self.coords, dims=self.dims),
             dict_to_dataset(
-                data_warmup, library=self.cmdstanpy, coords=self.coords, dims=self.dims
+                data,
+                library=self.cmdstanpy,
+                coords=self.coords,
+                dims=self.dims,
+                skip_event_dims=True,
+            ),
+            dict_to_dataset(
+                data_warmup,
+                library=self.cmdstanpy,
+                coords=self.coords,
+                dims=self.dims,
+                skip_event_dims=True,
             ),
         )

@@ -524,8 +524,12 @@
             )
             attrs = None
         return (
-            dict_to_dataset(data, coords=self.coords, dims=self.dims, attrs=attrs),
-            dict_to_dataset(data_warmup, coords=self.coords, dims=self.dims, attrs=attrs),
+            dict_to_dataset(
+                data, coords=self.coords, dims=self.dims, attrs=attrs, skip_event_dims=True
+            ),
+            dict_to_dataset(
+                data_warmup, coords=self.coords, dims=self.dims, attrs=attrs, skip_event_dims=True
+            ),
         )

     def to_inference_data(self):

@@ -155,7 +155,9 @@
                     "Check your model vectorization or set log_likelihood=False"
                 )
                 return None
-        return dict_to_dataset(data, library=self.pyro, coords=self.coords, dims=self.dims)
+        return dict_to_dataset(
+            data, library=self.pyro, coords=self.coords, dims=self.dims, skip_event_dims=True
+        )

     def translate_posterior_predictive_dict_to_xarray(self, dct, dims):
         """Convert posterior_predictive or prediction samples to xarray."""

@@ -318,8 +318,16 @@
             except TypeError:
                 warnings.warn(warn_msg)
         return (
-            dict_to_dataset(data, library=self.pymc3, dims=self.dims, coords=self.coords),
-            dict_to_dataset(data_warmup, library=self.pymc3, dims=self.dims, coords=self.coords),
+            dict_to_dataset(
+                data, library=self.pymc3, dims=self.dims, coords=self.coords, skip_event_dims=True
+            ),
+            dict_to_dataset(
+                data_warmup,
+                library=self.pymc3,
+                dims=self.dims,
+                coords=self.coords,
+                skip_event_dims=True,
+            ),
         )

     def translate_posterior_predictive_dict_to_xarray(self, dct) -> xr.Dataset:

@@ -46,7 +46,9 @@
         return wrapped


-def generate_dims_coords(shape, var_name, dims=None, coords=None, default_dims=None):
+def generate_dims_coords(
+    shape, var_name, dims=None, coords=None, default_dims=None, skip_event_dims=None
+):
     """Generate default dimensions and coordinates for a variable.

     Parameters
@@ -66,6 +68,7 @@
         when manipulating Monte Carlo traces, the ``default_dims`` would be
         ``["chain" , "draw"]`` which ArviZ uses as its own names for dimensions
         of MCMC traces.
+    skip_event_dims : bool, default False

     Returns
     -------
@@ -78,26 +81,40 @@
         default_dims = []
     if dims is None:
         dims = []
-    if len([dim for dim in dims if dim not in default_dims]) > len(shape):
-        warnings.warn(
-            (
-                "In variable {var_name}, there are "
-                + "more dims ({dims_len}) given than exist ({shape_len}). "
-                + "Passed array should have shape ({defaults}*shape)"
-            ).format(
-                var_name=var_name,
-                dims_len=len(dims),
-                shape_len=len(shape),
-                defaults=",".join(default_dims) + ", " if default_dims is not None else "",
-            ),
-            UserWarning,
-        )
+    if skip_event_dims is None:
+        skip_event_dims = False
+
     if coords is None:
         coords = {}

     coords = deepcopy(coords)
     dims = deepcopy(dims)

+    ndims = len([dim for dim in dims if dim not in default_dims])
+    if ndims > len(shape):
+        if skip_event_dims:
+            dims = dims[: len(shape)]
+        else:
+            warnings.warn(
+                (
+                    "In variable {var_name}, there are "
+                    + "more dims ({dims_len}) given than exist ({shape_len}). "
+                    + "Passed array should have shape ({defaults}*shape)"
+                ).format(
+                    var_name=var_name,
+                    dims_len=len(dims),
+                    shape_len=len(shape),
+                    defaults=",".join(default_dims) + ", " if default_dims is not None else "",
+                ),
+                UserWarning,
+            )
+    if skip_event_dims:
+        # needed in case the reduction keeps the event dimension with size 1
+        for i, (dim, dim_size) in enumerate(zip(dims, shape)):
+            if (dim in coords) and (dim_size != len(coords[dim])):
+                dims = dims[:i]
+                break
+
     for idx, dim_len in enumerate(shape):
         if (len(dims) < idx + 1) or (dims[idx] is None):
             dim_name = "{var_name}_dim_{idx}".format(var_name=var_name, idx=idx)
@@ -112,7 +129,7 @@
     return dims, coords


-def numpy_to_data_array(ary, *, var_name="data", coords=None, dims=None):
+def numpy_to_data_array(ary, *, var_name="data", coords=None, dims=None, skip_event_dims=None):
     """Convert a numpy array to an xarray.DataArray.

     The first two dimensions will be (chain, draw), and any remaining
@@ -134,6 +151,7 @@
         is the name of the dimension, the values are the index values.
     dims : List(str)
         A list of coordinate names for the variable
+    skip_event_dims : bool

     Returns
     -------
@@ -154,7 +172,12 @@
         )

     dims, coords = generate_dims_coords(
-        shape, var_name, dims=dims, coords=coords, default_dims=default_dims
+        shape,
+        var_name,
+        dims=dims,
+        coords=coords,
+        default_dims=default_dims,
+        skip_event_dims=skip_event_dims,
     )

     # reversed order for default dims: 'chain', 'draw'
@@ -173,7 +196,9 @@
     return xr.DataArray(ary, coords=coords, dims=dims)


-def dict_to_dataset(data, *, attrs=None, library=None, coords=None, dims=None):
+def dict_to_dataset(
+    data, *, attrs=None, library=None, coords=None, dims=None, skip_event_dims=None
+):
     """Convert a dictionary of numpy arrays to an xarray.Dataset.

     Parameters
@@ -189,6 +214,11 @@
     dims : dict[str] -> list[str]
         Dimensions of each variable. The keys are variable names, values are lists of
         coordinates.
+    skip_event_dims : bool
+        If True, cut extra dims whenever present to match the shape of the data.
+        Necessary for PPLs that use the same variable name in both the observed data
+        and log likelihood groups, to account for their different shapes when
+        observations are multivariate.

     Returns
     -------
@@ -205,7 +235,7 @@
     data_vars = {}
     for key, values in data.items():
         data_vars[key] = numpy_to_data_array(
-            values, var_name=key, coords=coords, dims=dims.get(key)
+            values, var_name=key, coords=coords, dims=dims.get(key), skip_event_dims=skip_event_dims
         )
     return xr.Dataset(data_vars=data_vars, attrs=make_attrs(attrs=attrs, library=library))
@@ -138,10 +138,20 @@

         return (
             dict_to_dataset(
-                data, library=None, coords=self.coords, dims=self.dims, attrs=self.attrs
+                data,
+                library=None,
+                coords=self.coords,
+                dims=self.dims,
+                attrs=self.attrs,
+                skip_event_dims=True,
             ),
             dict_to_dataset(
-                data_warmup, library=None, coords=self.coords, dims=self.dims, attrs=self.attrs
+                data_warmup,
+                library=None,
+                coords=self.coords,
+                dims=self.dims,
+                attrs=self.attrs,
+                skip_event_dims=True,
             ),
         )
@@ -143,8 +143,16 @@
         }

         return (
-            dict_to_dataset(data, library=self.pystan, coords=self.coords, dims=self.dims),
-            dict_to_dataset(data_warmup, library=self.pystan, coords=self.coords, dims=self.dims),
+            dict_to_dataset(
+                data, library=self.pystan, coords=self.coords, dims=self.dims, skip_event_dims=True
+            ),
+            dict_to_dataset(
+                data_warmup,
+                library=self.pystan,
+                coords=self.coords,
+                dims=self.dims,
+                skip_event_dims=True,
+            ),
         )

     @requires("posterior")