Compare 70a35e0 ... +1 ... 240d8f7

No flags found

Use flags to group coverage reports by test type, project and/or folders.
Then set up custom commit statuses and notifications for each flag.

e.g., #unittest #integration

#production #enterprise

#frontend #backend

Learn more about Codecov Flags here.

Showing 13 of 40 files from the diff.

@@ -34,7 +34,7 @@
Loading
34 34
convert_result(::typeof(waic), result) = todataframes(result)
35 35
convert_result(::typeof(r2_score), result) = todataframes(result)
36 36
function convert_result(::typeof(compare), result)
37 -
    return todataframes(result; index_name = :name)
37 +
    return todataframes(result; index_name=:name)
38 38
end
39 39
40 40
@doc doc"""
@@ -126,15 +126,15 @@
Loading
126 126
summarystats(idata; var_names = ["mu", "tau"], stat_funcs = func_dict, extend = false)
127 127
```
128 128
"""
129 -
function StatsBase.summarystats(data::InferenceData; group = :posterior, kwargs...)
129 +
function StatsBase.summarystats(data::InferenceData; group=:posterior, kwargs...)
130 130
    dataset = getproperty(data, Symbol(group))
131 131
    return summarystats(dataset; kwargs...)
132 132
end
133 -
function StatsBase.summarystats(data::Dataset; index_origin = 1, fmt = :wide, kwargs...)
134 -
    s = arviz.summary(data; index_origin = index_origin, fmt = fmt, kwargs...)
133 +
function StatsBase.summarystats(data::Dataset; index_origin=1, fmt=:wide, kwargs...)
134 +
    s = arviz.summary(data; index_origin=index_origin, fmt=fmt, kwargs...)
135 135
    s isa Dataset && return s
136 136
    index_name = Symbol(fmt) == :long ? :statistic : :variable
137 -
    return todataframes(s; index_name = index_name)
137 +
    return todataframes(s; index_name=index_name)
138 138
end
139 139
140 140
"""
@@ -150,12 +150,12 @@
Loading
150 150
151 151
# Keywords
152 152
153 -
- `coords::Dict{String,Vector}=nothing`: Map from named dimension to named indices.
154 -
- `dims::Dict{String,Vector{String}}=nothing`: Map from variable name to names of its
153 +
  - `coords::Dict{String,Vector}=nothing`: Map from named dimension to named indices.
154 +
  - `dims::Dict{String,Vector{String}}=nothing`: Map from variable name to names of its
155 155
    dimensions.
156 -
- `kwargs`: Keyword arguments passed to [`summarystats`](@ref).
156 +
  - `kwargs`: Keyword arguments passed to [`summarystats`](@ref).
157 157
"""
158 -
function summary(data; group = :posterior, coords = nothing, dims = nothing, kwargs...)
159 -
    dataset = convert_to_dataset(data; group = group, coords = coords, dims = dims)
158 +
function summary(data; group=:posterior, coords=nothing, dims=nothing, kwargs...)
159 +
    dataset = convert_to_dataset(data; group=group, coords=coords, dims=dims)
160 160
    return summarystats(dataset; kwargs...)
161 161
end

@@ -63,19 +63,20 @@
Loading
63 63
    ndraws, _, nchains = size(chns)
64 64
    loc_names_old = getfield(chns.name_map, section) # old may be Symbol or String
65 65
    loc_names = string.(loc_names_old)
66 -
    loc_str_to_old =
67 -
        Dict(name_str => name_old for (name_str, name_old) in zip(loc_names, loc_names_old))
66 +
    loc_str_to_old = Dict(
67 +
        name_str => name_old for (name_str, name_old) in zip(loc_names, loc_names_old)
68 +
    )
68 69
    vars_to_locs = varnames_locs_dict(loc_names, loc_str_to_old)
69 70
    vars_to_arrays = Dict{String,Array}()
70 71
    for (var_name, names_locs) in vars_to_locs
71 72
        loc_names, locs = names_locs
72 -
        max_loc = maximum(hcat([[loc...] for loc in locs]...); dims = 2)
73 +
        max_loc = maximum(hcat([[loc...] for loc in locs]...); dims=2)
73 74
        ndim = length(max_loc)
74 75
        sizes = tuple(max_loc...)
75 76
76 77
        oldarr = reshape_values(replacemissing(Array(chns.value[:, loc_names, :])))
77 78
        if ndim == 0
78 -
            arr = dropdims(oldarr; dims = 3)
79 +
            arr = dropdims(oldarr; dims=3)
79 80
        else
80 81
            arr = Array{Union{typeof(NaN),eltype(oldarr)}}(undef, nchains, ndraws, sizes...)
81 82
            fill!(arr, NaN)
@@ -89,10 +90,7 @@
Loading
89 90
end
90 91
91 92
function chains_to_dict(
92 -
    chns::Chains;
93 -
    ignore = String[],
94 -
    section = :parameters,
95 -
    rekey_fun = identity,
93 +
    chns::Chains; ignore=String[], section=:parameters, rekey_fun=identity
96 94
)
97 95
    section in sections(chns) || return Dict()
98 96
    chns_dict = section_dict(chns, section)
@@ -108,7 +106,7 @@
Loading
108 106
109 107
Remaining `kwargs` are forwarded to [`from_mcmcchains`](@ref).
110 108
"""
111 -
function convert_to_inference_data(chns::Chains; group = :posterior, kwargs...)
109 +
function convert_to_inference_data(chns::Chains; group=:posterior, kwargs...)
112 110
    group = Symbol(group)
113 111
    group == :posterior && return from_mcmcchains(chns; kwargs...)
114 112
    return from_mcmcchains(; group => chns)
@@ -169,16 +167,16 @@
Loading
169 167
    posterior_predictive,
170 168
    predictions,
171 169
    log_likelihood;
172 -
    library = MCMCChains,
170 +
    library=MCMCChains,
173 171
    kwargs...,
174 172
)
175 -
    kwargs = convert(Dict, merge((; dims = nothing), kwargs))
173 +
    kwargs = convert(Dict, merge((; dims=nothing), kwargs))
176 174
    library = string(library)
177 175
    rekey_fun = d -> rekey(d, stats_key_map)
178 176
179 177
    # Convert chains to dicts
180 178
    post_dict = chains_to_dict(posterior)
181 -
    stats_dict = chains_to_dict(posterior; section = :internals, rekey_fun = rekey_fun)
179 +
    stats_dict = chains_to_dict(posterior; section=:internals, rekey_fun=rekey_fun)
182 180
    stats_dict = enforce_stat_types(stats_dict)
183 181
184 182
    all_idata = InferenceData()
@@ -197,48 +195,45 @@
Loading
197 195
        if group_data isa Union{AbstractVector{String},NTuple{N,String} where {N}}
198 196
            group_data = popsubdict!(post_dict, group_data)
199 197
        end
200 -
        group_dataset = convert_to_dataset(group_data; library = library, kwargs...)
198 +
        group_dataset = convert_to_dataset(group_data; library=library, kwargs...)
201 199
        setattribute!(group_dataset, "inference_library", library)
202 200
        concat!(all_idata, InferenceData(; group => group_dataset))
203 201
    end
204 202
205 203
    attrs = attributes_dict(posterior)
206 204
    attrs = merge(attrs, Dict("inference_library" => library))
207 -
    kwargs = convert(Dict, merge((; attrs = attrs, dims = nothing), kwargs))
208 -
    post_idata = _from_dict(post_dict; sample_stats = stats_dict, kwargs...)
205 +
    kwargs = convert(Dict, merge((; attrs=attrs, dims=nothing), kwargs))
206 +
    post_idata = _from_dict(post_dict; sample_stats=stats_dict, kwargs...)
209 207
    concat!(all_idata, post_idata)
210 208
    return all_idata
211 209
end
212 210
function from_mcmcchains(
213 -
    posterior = nothing;
214 -
    posterior_predictive = nothing,
215 -
    predictions = nothing,
216 -
    prior = nothing,
217 -
    prior_predictive = nothing,
218 -
    observed_data = nothing,
219 -
    constant_data = nothing,
220 -
    predictions_constant_data = nothing,
221 -
    log_likelihood = nothing,
222 -
    library = MCMCChains,
211 +
    posterior=nothing;
212 +
    posterior_predictive=nothing,
213 +
    predictions=nothing,
214 +
    prior=nothing,
215 +
    prior_predictive=nothing,
216 +
    observed_data=nothing,
217 +
    constant_data=nothing,
218 +
    predictions_constant_data=nothing,
219 +
    log_likelihood=nothing,
220 +
    library=MCMCChains,
223 221
    kwargs...,
224 222
)
225 -
    kwargs = convert(Dict, merge((; dims = nothing, coords = nothing), kwargs))
223 +
    kwargs = convert(Dict, merge((; dims=nothing, coords=nothing), kwargs))
226 224
227 225
    all_idata = from_mcmcchains(
228 226
        posterior,
229 227
        posterior_predictive,
230 228
        predictions,
231 229
        log_likelihood;
232 -
        library = library,
230 +
        library=library,
233 231
        kwargs...,
234 232
    )
235 233
236 234
    if prior !== nothing
237 235
        pre_prior_idata = convert_to_inference_data(
238 -
            prior;
239 -
            posterior_predictive = prior_predictive,
240 -
            library = library,
241 -
            kwargs...,
236 +
            prior; posterior_predictive=prior_predictive, library=library, kwargs...
242 237
        )
243 238
        prior_idata = rekey(
244 239
            pre_prior_idata,
@@ -257,8 +252,7 @@
Loading
257 252
        :predictions_constant_data => predictions_constant_data,
258 253
    ]
259 254
        group_data === nothing && continue
260 -
        group_dataset =
261 -
            convert_to_constant_dataset(group_data; library = library, kwargs...)
255 +
        group_dataset = convert_to_constant_dataset(group_data; library=library, kwargs...)
262 256
        concat!(all_idata, InferenceData(; group => group_dataset))
263 257
    end
264 258
@@ -271,5 +265,5 @@
Loading
271 265
Call [`from_mcmcchains`](@ref) on output of `CmdStan`.
272 266
"""
273 267
function from_cmdstan(posterior::Chains; kwargs...)
274 -
    return from_mcmcchains(posterior; library = "CmdStan", kwargs...)
268 +
    return from_mcmcchains(posterior; library="CmdStan", kwargs...)
275 269
end

@@ -18,9 +18,10 @@
Loading
18 18
Convert `MonteCarloMeasurements.AbstractParticles` to an [`InferenceData`](@ref).
19 19
20 20
`obj` may have the following types:
21 -
- `::AbstractParticles`: Univariate draws from a single chain.
22 -
- `::AbstractVector{<:AbstractParticles}`: Univariate draws from a vector of chains.
23 -
- `::AbstractVector{<:AbstractArray{<:AbstractParticles}}`: Multivariate draws from a vector
21 +
22 +
  - `::AbstractParticles`: Univariate draws from a single chain.
23 +
  - `::AbstractVector{<:AbstractParticles}`: Univariate draws from a vector of chains.
24 +
  - `::AbstractVector{<:AbstractArray{<:AbstractParticles}}`: Multivariate draws from a vector
24 25
    of chains.
25 26
"""
26 27
function convert_to_inference_data(obj::AbstractParticles; kwargs...)
@@ -30,8 +31,7 @@
Loading
30 31
    return convert_to_inference_data(stack(stack.(obj)); kwargs...)
31 32
end
32 33
function convert_to_inference_data(
33 -
    obj::AbstractVector{<:AbstractArray{<:AbstractParticles}};
34 -
    kwargs...,
34 +
    obj::AbstractVector{<:AbstractArray{<:AbstractParticles}}; kwargs...
35 35
)
36 36
    return convert_to_inference_data(stack(stack.(obj)); kwargs...)
37 37
end

@@ -87,7 +87,7 @@
Loading
87 87
88 88
convert_to_inference_data(::Nothing; kwargs...) = InferenceData()
89 89
90 -
function convert_to_dataset(data::InferenceData; group = :posterior, kwargs...)
90 +
function convert_to_dataset(data::InferenceData; group=:posterior, kwargs...)
91 91
    group = Symbol(group)
92 92
    dataset = getproperty(data, group)
93 93
    return dataset
@@ -109,19 +109,13 @@
Loading
109 109
110 110
# A more flexible form of `from_dict`
111 111
# Internally calls `dict_to_dataset`
112 -
function _from_dict(
113 -
    posterior = nothing;
114 -
    attrs = Dict(),
115 -
    coords = nothing,
116 -
    dims = nothing,
117 -
    dicts...,
118 -
)
119 -
    dicts = (posterior = posterior, dicts...)
112 +
function _from_dict(posterior=nothing; attrs=Dict(), coords=nothing, dims=nothing, dicts...)
113 +
    dicts = (posterior=posterior, dicts...)
120 114
121 115
    datasets = []
122 116
    for (name, dict) in pairs(dicts)
123 117
        (dict === nothing || isempty(dict)) && continue
124 -
        dataset = dict_to_dataset(dict; attrs = attrs, coords = coords, dims = dims)
118 +
        dataset = dict_to_dataset(dict; attrs=attrs, coords=coords, dims=dims)
125 119
        push!(datasets, name => dataset)
126 120
    end
127 121
@@ -132,7 +126,7 @@
Loading
132 126
@doc forwarddoc(:concat) concat
133 127
134 128
function concat(data::InferenceData...; kwargs...)
135 -
    return arviz.concat(data...; inplace = false, kwargs...)
129 +
    return arviz.concat(data...; inplace=false, kwargs...)
136 130
end
137 131
138 132
Docs.getdoc(::typeof(concat)) = forwardgetdoc(:concat)
@@ -146,7 +140,7 @@
Loading
146 140
concat!
147 141
148 142
function concat!(data::InferenceData, other_data::InferenceData...; kwargs...)
149 -
    arviz.concat(data, other_data...; inplace = true, kwargs...)
143 +
    arviz.concat(data, other_data...; inplace=true, kwargs...)
150 144
    return data
151 145
end
152 146
concat!(; kwargs...) = InferenceData()
@@ -162,7 +156,7 @@
Loading
162 156
    return concat(data_new...)
163 157
end
164 158
165 -
function reorder_groups!(data::InferenceData; group_order = SUPPORTED_GROUPS)
159 +
function reorder_groups!(data::InferenceData; group_order=SUPPORTED_GROUPS)
166 160
    group_order = map(Symbol, group_order)
167 161
    names = groupnames(data)
168 162
    sorted_names = filter(n -> n ∈ names, group_order)

@@ -96,7 +96,7 @@
Loading
96 96
97 97
# Load ArviZ once at precompilation time for docstringS
98 98
copy!(arviz, import_arviz())
99 -
check_needs_update(update = false)
99 +
check_needs_update(; update=false)
100 100
const _precompile_arviz_version = arviz_version()
101 101
102 102
function __init__()

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Learn more. Showing 1 file with coverage changes found.

Changes in src/rcparams.jl
-2
+2
Loading file...
Files Coverage
src 0.37% 88.50%
Project Totals (13 files) 88.50%
Loading