import sys
import dask
import dask.array as da
from dask.diagnostics import ProgressBar
import warnings
import numpy as np
from numba import njit, prange
from edt import edt
import operator as op
from tqdm import tqdm
import scipy.ndimage as spim
import scipy.spatial as sptl
from collections import namedtuple
from skimage.morphology import reconstruction
from skimage.segmentation import clear_border, watershed
from skimage.morphology import ball, disk, square, cube, diamond, octahedron
from porespy.tools import randomize_colors, fftmorphology
from porespy.tools import get_border, extend_slice, extract_subsection
from porespy.tools import _create_alias_map
from porespy.tools import ps_disk, ps_ball


def apply_padded(im, pad_width, func, pad_val=1, **kwargs):
    r"""
    Applies padding to an image before sending it to ``func``, then extracts
    the result corresponding to the original image shape.

    Parameters
    ----------
    im : ND-image
        The image to which ``func`` should be applied
    pad_width : int or list of ints
        The amount of padding to apply to each axis.  Refer to ``numpy.pad``
        documentation for more details.
    func : function handle
        The function to apply to the padded image
    pad_val : scalar
        The value to place into the padded voxels.  The default is 1 (or
        ``True``), which extends the pore space.
    kwargs : additional keyword arguments
        All additional keyword arguments are collected and passed to ``func``.

    Notes
    -----
    A use case for this is when using ``skimage.morphology.skeletonize_3d``
    to ensure that the skeleton extends beyond the edges of the image.
    """
    padded = np.pad(im, pad_width=pad_width,
                    mode='constant', constant_values=pad_val)
    temp = func(padded, **kwargs)
    result = extract_subsection(im=temp, shape=im.shape)
    return result
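

# Usage sketch (kept as a comment so it is not executed on import): a plausible
# way to skeletonize a test image with padding so the skeleton reaches the
# image edges, as mentioned in the Notes above.  The test image from
# ``porespy.generators.blobs`` is an assumption used purely for illustration.
#
#   import porespy as ps
#   from skimage.morphology import skeletonize_3d
#   im = ps.generators.blobs(shape=[100, 100, 100], porosity=0.6)
#   sk = apply_padded(im=im, pad_width=20, func=skeletonize_3d, pad_val=1)
#   sk.shape == im.shape  # True, the padding is removed from the result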


def trim_small_clusters(im, size=1):
    r"""
    Remove isolated voxels or clusters of a given size or smaller

    Parameters
    ----------
    im : ND-array
        The binary image from which voxels are to be removed
    size : scalar
        The threshold size of clusters to trim.  Clusters with this many
        voxels or fewer will be trimmed.  The default is 1, so only single
        voxels are removed.

    Returns
    -------
    im : ND-image
        A copy of ``im`` with clusters of voxels smaller than the given
        ``size`` removed.

    """
    if im.ndim == 2:
        strel = disk(1)
    elif im.ndim == 3:
        strel = ball(1)
    else:
        raise Exception("Only 2D or 3D images are accepted")
    filtered_array = np.copy(im)
    labels, N = spim.label(filtered_array, structure=strel)
    id_sizes = np.array(spim.sum(im, labels, range(N + 1)))
    area_mask = id_sizes <= size
    filtered_array[area_mask[labels]] = 0
    return filtered_array
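

# Usage sketch (comment only): removing clusters of 10 voxels or fewer from a
# noisy binary image.  The random test image is an assumption used purely for
# illustration.
#
#   import numpy as np
#   np.random.seed(0)
#   im = np.random.rand(200, 200) > 0.45          # noisy binary image
#   im_clean = trim_small_clusters(im, size=10)   # drop clusters of <= 10 voxels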


def hold_peaks(im, axis=-1):
    r"""
    Replaces each voxel with the highest value along the given axis

    Parameters
    ----------
    im : ND-image
        A greyscale image whose local peaks are to be found and held along
        the given axis.
    axis : int
        The axis along which the peaks are held.  The default is -1, meaning
        the last axis.
    """
    A = im
    B = np.swapaxes(A, axis, -1)  # Work along the last axis
    # Forward differences along the axis, with -1 sentinels at both ends
    updown = np.empty((*B.shape[:-1], B.shape[-1] + 1), B.dtype)
    updown[..., 0], updown[..., -1] = -1, -1
    np.subtract(B[..., 1:], B[..., :-1], out=updown[..., 1:-1])
    chnidx = np.where(updown)
    chng = updown[chnidx]
    # Peaks are where a rise is followed by a fall (or at the first voxel)
    (pkidx,) = np.where((chng[:-1] > 0) & (chng[1:] < 0) | (chnidx[-1][:-1] == 0))
    pkidx = (*map(op.itemgetter(pkidx), chnidx),)
    # Scatter peak-to-peak increments, then integrate to hold each peak value
    out = np.zeros_like(A)
    aux = out.swapaxes(axis, -1)
    aux[(*map(op.itemgetter(slice(1, None)), pkidx),)] = np.diff(B[pkidx])
    aux[..., 0] = B[..., 0]
    result = out.cumsum(axis=axis)
    return result


def distance_transform_lin(im, axis=0, mode="both"):
    r"""
    Replaces each void voxel with the linear distance to the nearest solid
    voxel along the specified axis.

    Parameters
    ----------
    im : ND-array
        The image of the porous material with ``True`` values indicating the
        void phase (or phase of interest)

    axis : int
        The direction along which the distance should be measured, the default
        is 0 (i.e. along the x-direction)

    mode : string
        Controls how the distance is measured.  Options are:

        'forward' - Distances are measured in the increasing direction along
        the specified axis

        'reverse' - Distances are measured in the reverse direction.
        *'backward'* is also accepted.

        'both' - Distances are calculated in both directions (by recursively
        calling itself), then reporting the minimum value of the two results.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with each foreground voxel containing the distance to
        the nearest background along the specified axis.
    """
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))
    if mode in ["backward", "reverse"]:
        im = np.flip(im, axis)
        im = distance_transform_lin(im=im, axis=axis, mode="forward")
        im = np.flip(im, axis)
        return im
    elif mode in ["both"]:
        im_f = distance_transform_lin(im=im, axis=axis, mode="forward")
        im_b = distance_transform_lin(im=im, axis=axis, mode="backward")
        return np.minimum(im_f, im_b)
    else:
        b = np.cumsum(im > 0, axis=axis)
        c = np.diff(b * (im == 0), axis=axis)
        d = np.minimum.accumulate(c, axis=axis)
        if im.ndim == 1:
            e = np.pad(d, pad_width=[1, 0], mode="constant", constant_values=0)
        elif im.ndim == 2:
            ax = [[[1, 0], [0, 0]], [[0, 0], [1, 0]]]
            e = np.pad(d, pad_width=ax[axis], mode="constant", constant_values=0)
        elif im.ndim == 3:
            ax = [
                [[1, 0], [0, 0], [0, 0]],
                [[0, 0], [1, 0], [0, 0]],
                [[0, 0], [0, 0], [1, 0]],
            ]
            e = np.pad(d, pad_width=ax[axis], mode="constant", constant_values=0)
        f = im * (b + e)
        return f
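

# Usage sketch (comment only): measuring the distance to the nearest solid
# voxel along the x-axis, in one direction and in both.  The test image from
# ``porespy.generators.blobs`` is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   dt_both = distance_transform_lin(im, axis=0, mode="both")
#   dt_fwd = distance_transform_lin(im, axis=0, mode="forward")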


def snow_partitioning(im, dt=None, r_max=4, sigma=0.4, return_all=False,
                      mask=True, randomize=True):
    r"""
    Partitions the void space into pore regions using a marker-based watershed
    algorithm, with specially filtered peaks as markers.

    The SNOW network extraction algorithm (Sub-Network of an Over-segmented
    Watershed) was designed to handle the peculiarities of high porosity
    materials, but it applies well to other materials as well.

    Parameters
    ----------
    im : array_like
        A boolean image of the domain, with ``True`` indicating the pore space
        and ``False`` elsewhere.
    dt : array_like, optional
        The distance transform of the pore space.  This is done automatically
        if not provided, but if the distance transform has already been
        computed then supplying it can save some time.
    r_max : int
        The radius of the spherical structuring element to use in the Maximum
        filter stage that is used to find peaks.  The default is 4
    sigma : float
        The standard deviation of the Gaussian filter used in step 1.  The
        default is 0.4.  If 0 is given then the filter is not applied, which is
        useful if a distance transform is supplied as the ``im`` argument that
        has already been processed.
    return_all : boolean
        If set to ``True`` a named tuple is returned containing the original
        image, the distance transform, the filtered peaks, and the final
        pore regions.  The default is ``False``
    mask : boolean
        Apply a mask to the regions where the solid phase is.  Default is
        ``True``
    randomize : boolean
        If ``True`` (default), then the region colors will be randomized before
        returning.  This is helpful for visualization since otherwise
        neighboring regions have similar coloring and are hard to distinguish.

    Returns
    -------
    image : ND-array
        An image the same shape as ``im`` with the void space partitioned into
        pores using a marker based watershed with the peaks found by the
        SNOW algorithm [1].

    Notes
    -----
    If ``return_all`` is ``True`` then a **named tuple** is returned containing
    all of the images used during the process.  They can be accessed as
    attributes with the following names:

        * ``im``: The binary image of the void space
        * ``dt``: The distance transform of the image
        * ``peaks``: The peaks of the distance transform after applying the
        steps of the SNOW algorithm
        * ``regions``: The void space partitioned into pores using a marker
        based watershed with the peaks found by the SNOW algorithm

    References
    ----------
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
    using marker-based watershed segmentation".  Physical Review E. (2017)

    """
    tup = namedtuple("results", field_names=["im", "dt", "peaks", "regions"])
    print("-" * 60)
    print("Beginning SNOW Algorithm")
    im_shape = np.array(im.shape)
    if im.dtype != bool:
        print("Converting supplied image (im) to boolean")
        im = im > 0
    if dt is None:
        print("Performing Distance Transform")
        if np.any(im_shape == 1):
            ax = np.where(im_shape == 1)[0][0]
            dt = edt(im.squeeze())
            dt = np.expand_dims(dt, ax)
        else:
            dt = edt(im)

    tup.im = im
    tup.dt = dt

    if sigma > 0:
        print("Applying Gaussian blur with sigma =", str(sigma))
        dt = spim.gaussian_filter(input=dt, sigma=sigma)

    peaks = find_peaks(dt=dt, r_max=r_max)
    print("Initial number of peaks: ", spim.label(peaks)[1])
    peaks = trim_saddle_points(peaks=peaks, dt=dt, max_iters=500)
    print("Peaks after trimming saddle points: ", spim.label(peaks)[1])
    peaks = trim_nearby_peaks(peaks=peaks, dt=dt)
    peaks, N = spim.label(peaks)
    print("Peaks after trimming nearby peaks: ", N)
    tup.peaks = peaks
    if mask:
        mask_solid = im > 0
    else:
        mask_solid = None
    regions = watershed(image=-dt, markers=peaks, mask=mask_solid)
    if randomize:
        regions = randomize_colors(regions)
    if return_all:
        tup.regions = regions
        return tup
    else:
        return regions
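

# Usage sketch (comment only): a typical SNOW partitioning run on a test
# image.  The ``porespy.generators.blobs`` call is an assumption used for
# illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200, 200], porosity=0.6)
#   snow = snow_partitioning(im, r_max=4, sigma=0.4, return_all=True)
#   regions = snow.regions   # labelled pore regions
#   peaks = snow.peaks       # filtered watershed markers
#   dt = snow.dt             # distance transform used internally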


def snow_partitioning_n(im, r_max=4, sigma=0.4, return_all=True,
                        mask=True, randomize=False, alias=None):
    r"""
    This function partitions an image containing an arbitrary number of phases
    into regions using a marker-based watershed segmentation.  It is an
    extension of the ``snow_partitioning`` function with all phases partitioned
    together.

    Parameters
    ----------
    im : ND-array
        Image of porous material where each phase is represented by unique
        integer starting from 1 (0's are ignored).
    r_max : scalar
        The radius of the spherical structuring element to use in the Maximum
        filter stage that is used to find peaks.  The default is 4.
    sigma : scalar
        The standard deviation of the Gaussian filter used.  The default is
        0.4. If 0 is given then the filter is not applied, which is useful if a
        distance transform is supplied as the ``im`` argument that has already
        been processed.
    return_all : boolean (default is ``True``)
        If set to ``True`` a named tuple is returned containing the original
        image, the combined distance transform, list of each phase max label,
        and the final combined regions of all phases.
    mask : boolean (default is ``True``)
        Apply a mask to the regions which are not under concern.
    randomize : boolean (default is ``False``)
        If ``True``, then the region colors will be randomized before
        returning.  This is helpful for visualization since otherwise
        neighboring regions have similar coloring and are hard to distinguish.
    alias : dict (Optional)
        A dictionary that assigns unique image label to specific phases. For
        example {1: 'Solid'} will show all structural properties associated
        with label 1 as Solid phase properties. If ``None`` then default
        labelling will be used, i.e. {1: 'Phase1', ...}.

    Returns
    -------
    An image the same shape as ``im`` with all phases partitioned into
    regions using a marker based watershed with the peaks found by the
    SNOW algorithm [1].  If ``return_all`` is ``True`` then a **named tuple**
    is returned with the following attributes:

        * ``im`` : The actual image of the porous material
        * ``dt`` : The combined distance transform of the image
        * ``phase_max_label`` : The list of max label of each phase in order to
        distinguish between each other
        * ``regions`` : The partitioned regions of n phases using a marker
        based watershed with the peaks found by the SNOW algorithm

    References
    ----------
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
    using marker-based watershed segmentation".  Physical Review E. (2017)

    [2] Khan, ZA et al. "Dual network extraction algorithm to investigate
    multiple transport processes in porous materials: Image-based modeling
    of pore and grain-scale processes".  Computers in Chemical Engineering.
    (2019)

    See Also
    --------
    snow_partitioning

    Notes
    -----
    In principle it is possible to perform a distance transform on each
    phase separately, merge these into a single image, then apply the
    watershed only once. This, however, has been found to create edge artifacts
    between regions arising from the way watershed handles plateaus in the
    distance transform. To overcome this, this function applies the watershed
    to each of the distance transforms separately, then merges the segmented
    regions back into a single image.

    """
    # Get alias if provided by user
    al = _create_alias_map(im=im, alias=alias)
    # Perform snow on each phase and merge all segmentation and dt together
    phases_num = np.unique(im * 1)
    phases_num = np.trim_zeros(phases_num)
    combined_dt = 0
    combined_region = 0
    num = [0]
    for i, j in enumerate(phases_num):
        print("_" * 60)
        if alias is None:
            print("Processing Phase {}".format(j))
        else:
            print("Processing Phase {}".format(al[j]))
        phase_snow = snow_partitioning(
            im == j,
            dt=None,
            r_max=r_max,
            sigma=sigma,
            return_all=return_all,
            mask=mask,
            randomize=randomize,
        )
        combined_dt += phase_snow.dt
        phase_snow.regions *= phase_snow.im
        phase_snow.regions += num[i]
        phase_ws = phase_snow.regions * phase_snow.im
        phase_ws[phase_ws == num[i]] = 0
        combined_region += phase_ws
        num.append(np.amax(combined_region))
    if return_all:
        tup = namedtuple(
            "results", field_names=["im", "dt", "phase_max_label", "regions"]
        )
        tup.im = im
        tup.dt = combined_dt
        tup.phase_max_label = num[1:]
        tup.regions = combined_region
        return tup
    else:
        return combined_region
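

# Usage sketch (comment only): partitioning a two-phase image where the void
# is labelled 1 and the solid 2.  The input construction below is an
# assumption used for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[150, 150], porosity=0.6)
#   phases = im.astype(int) + 2 * (~im).astype(int)   # 1 = void, 2 = solid
#   snow_n = snow_partitioning_n(phases, alias={1: 'void', 2: 'solid'})
#   regions = snow_n.regions             # all phases partitioned together
#   max_labels = snow_n.phase_max_label  # label ranges for each phase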


def find_peaks(dt, r_max=4, footprint=None, **kwargs):
    r"""
    Finds local maxima in the distance transform

    Parameters
    ----------
    dt : ND-array
        The distance transform of the pore space.  This may be calculated and
        filtered using any means desired.
    r_max : scalar
        The size of the structuring element used in the maximum filter.  This
        controls the localness of any maxima. The default is 4 voxels.
    footprint : ND-array
        Specifies the shape of the structuring element used to define the
        neighborhood when looking for peaks.  If ``None`` (the default) is
        specified then a spherical shape is used (or circular in 2D).

    Returns
    -------
    image : ND-array
        An array of booleans with ``True`` values at the location of any
        local maxima.

    Notes
    -----
    It is also possible to use the ``peak_local_max`` function from the
    ``skimage.feature`` module as follows:

    ``peaks = peak_local_max(image=dt, min_distance=r, exclude_border=0,
    indices=False)``

    The *skimage* function automatically uses a square structuring element
    which is significantly faster than using a circular or spherical element.
    """
    im = dt > 0
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))
    if footprint is None:
        if im.ndim == 2:
            footprint = disk
        elif im.ndim == 3:
            footprint = ball
        else:
            raise Exception("only 2-d and 3-d images are supported")
    parallel = kwargs.pop('parallel', False)
    cores = kwargs.pop('cores', None)
    divs = kwargs.pop('divs', 2)
    if parallel:
        overlap = max(footprint(r_max).shape)
        peaks = chunked_func(func=find_peaks, overlap=overlap,
                             im_arg='dt', dt=dt, footprint=footprint,
                             cores=cores, divs=divs)
    else:
        mx = spim.maximum_filter(dt + 2 * (~im), footprint=footprint(r_max))
        peaks = (dt == mx) * im
    return peaks
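

# Usage sketch (comment only): peaks are normally sought on a lightly blurred
# distance transform.  ``edt`` and ``scipy.ndimage`` are imported at the top
# of this module; the test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   dt = spim.gaussian_filter(edt(im), sigma=0.4)
#   peaks = find_peaks(dt, r_max=4)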


def reduce_peaks(peaks):
    r"""
    Any peaks that are broad or elongated are replaced with a single voxel
    that is located at the center of mass of the original voxels.

    Parameters
    ----------
    peaks : ND-image
        An image containing ``True`` values indicating peaks in the distance
        transform

    Returns
    -------
    image : ND-array
        An array with the same number of isolated peaks as the original image,
        but fewer total ``True`` voxels.

    Notes
    -----
    The center of mass of a group of voxels is used as the new single voxel, so
    if the group has an odd shape (like a horseshoe), the new voxel may *not*
    lie on top of the original set.
    """
    if peaks.ndim == 2:
        strel = square
    else:
        strel = cube
    markers, N = spim.label(input=peaks, structure=strel(3))
    inds = spim.measurements.center_of_mass(
        input=peaks, labels=markers, index=np.arange(1, N + 1)
    )
    inds = np.floor(inds).astype(int)
    # Centroid may not be on old pixel, so create a new peaks image
    peaks_new = np.zeros_like(peaks, dtype=bool)
    peaks_new[tuple(inds.T)] = True
    return peaks_new


def trim_saddle_points(peaks, dt, max_iters=10, verbose=1):
    r"""
    Removes peaks that were mistakenly identified because they lay on a
    saddle or ridge in the distance transform that was not actually a true
    local peak.

    Parameters
    ----------
    peaks : ND-array
        A boolean image containing True values to mark peaks in the distance
        transform (``dt``)

    dt : ND-array
        The distance transform of the pore space for which the true peaks are
        sought.

    max_iters : int
        The maximum number of iterations to run while eroding the saddle
        points.  The default is 10, which is usually not reached; however,
        a warning is issued if the loop ends prior to removing all saddle
        points.

    Returns
    -------
    image : ND-array
        An image with fewer peaks than the input image

    References
    ----------
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
    using marker-based watershed segmentation".  Physical Review E. (2017)

    """
    peaks = np.copy(peaks)
    if dt.ndim == 2:
        from skimage.morphology import square as cube
    else:
        from skimage.morphology import cube
    labels, N = spim.label(peaks)
    slices = spim.find_objects(labels)
    for i in range(N):
        s = extend_slice(s=slices[i], shape=peaks.shape, pad=10)
        peaks_i = labels[s] == i + 1
        dt_i = dt[s]
        im_i = dt_i > 0
        iters = 0
        peaks_dil = np.copy(peaks_i)
        while iters < max_iters:
            iters += 1
            peaks_dil = spim.binary_dilation(input=peaks_dil, structure=cube(3))
            peaks_max = peaks_dil * np.amax(dt_i * peaks_dil)
            peaks_extended = (peaks_max == dt_i) * im_i
            if np.all(peaks_extended == peaks_i):
                break  # Found a true peak
            elif np.sum(peaks_extended * peaks_i) == 0:
                peaks_i = False
                break  # Found a saddle point
        peaks[s] = peaks_i
        if iters >= max_iters and verbose:
            print(
                "Maximum number of iterations reached, consider "
                + "running again with a larger value of max_iters"
            )
    return peaks


def trim_nearby_peaks(peaks, dt):
    r"""
    Finds pairs of peaks that are nearer to each other than to the solid phase,
    and removes the peak that is closer to the solid.

    Parameters
    ----------
    peaks : ND-array
        A boolean image containing True values to mark peaks in the distance
        transform (``dt``)

    dt : ND-array
        The distance transform of the pore space for which the true peaks are
        sought.

    Returns
    -------
    image : ND-array
        An array the same size as ``peaks`` containing a subset of the peaks
        in the original image.

    Notes
    -----
    Each pair of peaks is considered simultaneously, so for a triplet of peaks
    each pair is considered.  This ensures that only the single peak that is
    furthest from the solid is kept.  No iteration is required.

    References
    ----------
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
    using marker-based watershed segmentation".  Physical Review E. (2017)
    """
    peaks = np.copy(peaks)
    if dt.ndim == 2:
        from skimage.morphology import square as cube
    else:
        from skimage.morphology import cube
    peaks, N = spim.label(peaks, structure=cube(3))
    crds = spim.measurements.center_of_mass(peaks, labels=peaks,
                                            index=np.arange(1, N + 1))
    crds = np.vstack(crds).astype(int)  # Convert to numpy array of ints
    # Get distance between each peak as a distance map
    tree = sptl.cKDTree(data=crds)
    temp = tree.query(x=crds, k=2)
    nearest_neighbor = temp[1][:, 1]
    dist_to_neighbor = temp[0][:, 1]
    del temp, tree  # Free-up memory
    dist_to_solid = dt[tuple(crds.T)]  # Get distance to solid for each peak
    hits = np.where(dist_to_neighbor < dist_to_solid)[0]
    # Drop the peak that is closer to the solid than its neighbor
    drop_peaks = []
    for peak in hits:
        if dist_to_solid[peak] < dist_to_solid[nearest_neighbor[peak]]:
            drop_peaks.append(peak)
        else:
            drop_peaks.append(nearest_neighbor[peak])
    drop_peaks = np.unique(drop_peaks)
    # Remove peaks from image
    slices = spim.find_objects(input=peaks)
    for s in drop_peaks:
        peaks[slices[s]] = 0
    return peaks > 0
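

# Usage sketch (comment only): the three peak-filtering steps used inside
# ``snow_partitioning``, chained by hand.  The test image is an assumption
# for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   dt = spim.gaussian_filter(edt(im), sigma=0.4)
#   peaks = find_peaks(dt, r_max=4)
#   peaks = trim_saddle_points(peaks=peaks, dt=dt, max_iters=500)
#   peaks = trim_nearby_peaks(peaks=peaks, dt=dt)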


def find_disconnected_voxels(im, conn=None):
    r"""
    This identifies all pore (or solid) voxels that are not connected to the
    edge of the image.  This can be used to find blind pores, or remove
    artifacts such as solid phase voxels that are floating in space.

    Parameters
    ----------
    im : ND-image
        A Boolean image, with True values indicating the phase for which
        disconnected voxels are sought.
    conn : int
        For 2D the options are 4 and 8 for square and diagonal neighbors, while
        for 3D the options are 6 and 26, similarly for square and diagonal
        neighbors.  The default is the maximum option.

    Returns
    -------
    image : ND-array
        An ND-image the same size as ``im``, with True values indicating
        voxels of the phase of interest (i.e. True values in the original
        image) that are not connected to the outer edges.

    Notes
    -----
    The returned array (e.g. ``holes``) can be used to trim blind pores from
    ``im`` using: ``im[holes] = False``

    """
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))
    if im.ndim == 2:
        if conn == 4:
            strel = disk(1)
        elif conn in [None, 8]:
            strel = square(3)
        else:
            raise Exception("Received conn is not valid")
    elif im.ndim == 3:
        if conn == 6:
            strel = ball(1)
        elif conn in [None, 26]:
            strel = cube(3)
        else:
            raise Exception("Received conn is not valid")
    labels, N = spim.label(input=im, structure=strel)
    holes = clear_border(labels=labels) > 0
    return holes
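

# Usage sketch (comment only): finding blind pores and trimming them directly,
# as suggested in the Notes above.  The test image is an assumption for
# illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.5)
#   holes = find_disconnected_voxels(im)
#   im[holes] = False   # remove pore voxels not connected to the image edges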


def fill_blind_pores(im, conn=None):
    r"""
    Fills all pores that are not connected to the edges of the image.

    Parameters
    ----------
    im : ND-array
        The image of the porous material
    conn : int
        For 2D the options are 4 and 8 for square and diagonal neighbors, while
        for 3D the options are 6 and 26, similarly for square and diagonal
        neighbors.  The default is the maximum option.

    Returns
    -------
    image : ND-array
        A version of ``im`` but with all the disconnected pores removed.

    See Also
    --------
    find_disconnected_voxels

    """
    im = np.copy(im)
    holes = find_disconnected_voxels(im, conn=conn)
    im[holes] = False
    return im
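

# Usage sketch (comment only): a common clean-up step before simulation is to
# fill blind pores and then remove floating solid (see ``trim_floating_solid``
# below).  The test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.5)
#   im = fill_blind_pores(im)
#   im = trim_floating_solid(im)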


def trim_floating_solid(im, conn=None):
    r"""
    Removes all solid that is not attached to the edges of the image.

    Parameters
    ----------
    im : ND-array
        The image of the porous material
    conn : int
        For 2D the options are 4 and 8 for square and diagonal neighbors, while
        for 3D the options are 6 and 26, similarly for square and diagonal
        neighbors.  The default is the maximum option.

    Returns
    -------
    image : ND-array
        A version of ``im`` but with all the disconnected solid removed.

    See Also
    --------
    find_disconnected_voxels

    """
    im = np.copy(im)
    holes = find_disconnected_voxels(~im, conn=conn)
    im[holes] = True
    return im


def trim_nonpercolating_paths(im, inlet_axis=0, outlet_axis=0,
                              inlets=None, outlets=None):
    r"""
    Removes all nonpercolating paths between specified edges

    This function is essential when performing transport simulations on an
    image, since image regions that do not span between the desired inlet and
    outlet do not contribute to the transport.

    Parameters
    ----------
    im : ND-array
        The image of the porous material with ``True`` values indicating the
        phase of interest
    inlet_axis : int
        Inlet axis of boundary condition. For a three-dimensional image the
        number ranges from 0 to 2. For a two-dimensional image the range is
        0 to 1. If ``inlets`` is given then this argument is ignored.
    outlet_axis : int
        Outlet axis of boundary condition. For a three-dimensional image the
        number ranges from 0 to 2. For a two-dimensional image the range is
        0 to 1. If ``outlets`` is given then this argument is ignored.
    inlets : ND-image (optional)
        A boolean mask indicating locations of inlets.  If this argument is
        supplied then ``inlet_axis`` is ignored.
    outlets : ND-image (optional)
        A boolean mask indicating locations of outlets. If this argument is
        supplied then ``outlet_axis`` is ignored.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with all the nonpercolating paths removed

    See Also
    --------
    find_disconnected_voxels
    trim_floating_solid
    trim_blind_pores

    """
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))
    im = trim_floating_solid(~im)
    labels = spim.label(~im)[0]
    if inlets is None:
        inlets = np.zeros_like(im, dtype=bool)
        if im.ndim == 3:
            if inlet_axis == 0:
                inlets[0, :, :] = True
            elif inlet_axis == 1:
                inlets[:, 0, :] = True
            elif inlet_axis == 2:
                inlets[:, :, 0] = True
        if im.ndim == 2:
            if inlet_axis == 0:
                inlets[0, :] = True
            elif inlet_axis == 1:
                inlets[:, 0] = True
    if outlets is None:
        outlets = np.zeros_like(im, dtype=bool)
        if im.ndim == 3:
            if outlet_axis == 0:
                outlets[-1, :, :] = True
            elif outlet_axis == 1:
                outlets[:, -1, :] = True
            elif outlet_axis == 2:
                outlets[:, :, -1] = True
        if im.ndim == 2:
            if outlet_axis == 0:
                outlets[-1, :] = True
            elif outlet_axis == 1:
                outlets[:, -1] = True
    IN = np.unique(labels * inlets)
    OUT = np.unique(labels * outlets)
    new_im = np.isin(labels, list(set(IN) ^ set(OUT)), invert=True)
    im[new_im == 0] = True
    return ~im
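

# Usage sketch (comment only): keeping only pore space that spans the image
# from the x = 0 face to the x = -1 face before a transport simulation.  The
# test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.55)
#   im_span = trim_nonpercolating_paths(im, inlet_axis=0, outlet_axis=0)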


def trim_extrema(im, h, mode="maxima"):
    r"""
    Trims local extrema in greyscale values by a specified amount.

    This essentially decapitates peaks and/or floods valleys.

    Parameters
    ----------
    im : ND-array
        The image whose extrema are to be removed

    h : float
        The height to remove from each peak or fill in each valley

    mode : string {'maxima' | 'minima' | 'extrema'}
        Specifies whether to remove maxima or minima or both

    Returns
    -------
    image : ND-array
        A copy of the input image with all the peaks and/or valleys removed.

    Notes
    -----
    This function is referred to as **imhmax** or **imhmin** in Matlab.

    """
    result = im
    if mode in ["maxima", "extrema"]:
        result = reconstruction(seed=result - h, mask=result, method="dilation")
    if mode in ["minima", "extrema"]:
        result = reconstruction(seed=result + h, mask=result, method="erosion")
    return result
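

# Usage sketch (comment only): suppressing shallow local maxima in a distance
# transform before peak finding, analogous to Matlab's imhmax mentioned in the
# Notes.  The test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   dt = edt(im)
#   dt_trimmed = trim_extrema(dt, h=1.0, mode="maxima")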


def flood(im, regions=None, mode="max"):
    r"""
    Floods/fills each region in an image with a single value based on the
    specific values in that region.

    The ``mode`` argument is used to determine how the value is calculated.

    Parameters
    ----------
    im : array_like
        An image with isolated regions with numerical values in each voxel,
        and 0's elsewhere.
    regions : array_like
        An array the same shape as ``im`` with each region labeled.  If
        ``None`` is supplied (default) then ``scipy.ndimage.label`` is used
        with its default arguments.
    mode : string
        Specifies how to determine which value should be used to flood each
        region.  Options are:

        'maximum' - Floods each region with the local maximum in that region

        'minimum' - Floods each region with the local minimum in that region

        'median' - Floods each region with the local median in that region

        'mean' - Floods each region with the local mean in that region

        'size' - Floods each region with the size of that region

    Returns
    -------
    image : ND-array
        A copy of ``im`` with new values placed in each foreground voxel based
        on the ``mode``.

    See Also
    --------
    prop_to_image

    """
    mask = im > 0
    if regions is None:
        labels, N = spim.label(mask)
    else:
        labels = np.copy(regions)
        N = labels.max()
    mode = "sum" if mode == "size" else mode
    mode = "maximum" if mode == "max" else mode
    mode = "minimum" if mode == "min" else mode
    if mode in ["mean", "median", "maximum", "minimum", "sum"]:
        f = getattr(spim, mode)
        vals = f(input=im, labels=labels, index=range(0, N + 1))
        im_flooded = vals[labels]
        im_flooded = im_flooded * mask
    else:
        raise Exception(mode + " is not a recognized mode")
    return im_flooded
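

# Usage sketch (comment only): filling each watershed region with the local
# maximum of the distance transform, which approximates the size of that
# region's pore.  The test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   regions = snow_partitioning(im, randomize=False)
#   pore_sizes = flood(im=edt(im), regions=regions, mode="max")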


def find_dt_artifacts(dt):
    r"""
    Finds points in a distance transform that are closer to the image boundary
    than to the solid phase.

    These points could *potentially* be erroneously high since their distance
    values do not reflect the possibility that solid may have been present
    beyond the border of the image but lost by trimming.

    Parameters
    ----------
    dt : ND-array
        The distance transform of the phase of interest

    Returns
    -------
    image : ND-array
        An ND-array the same shape as ``dt`` with numerical values indicating
        the maximum amount of error in each voxel, which is found by
        subtracting the distance to the nearest edge of the image from the
        distance transform value. In other words, this is the error that would
        be found if there were a solid voxel lurking just beyond the nearest
        edge of the image.  Obviously, voxels with a value of zero have no
        error.

    """
    temp = np.ones(shape=dt.shape) * np.inf
    for ax in range(dt.ndim):
        dt_lin = distance_transform_lin(np.ones_like(temp, dtype=bool),
                                        axis=ax, mode="both")
        temp = np.minimum(temp, dt_lin)
    result = np.clip(dt - temp, a_min=0, a_max=np.inf)
    return result


def region_size(im):
    r"""
    Replace each voxel with the size of the region to which it belongs

    Parameters
    ----------
    im : ND-array
        Either a boolean image with ``True`` indicating the features of
        interest, in which case ``scipy.ndimage.label`` will be applied to
        find regions, or a greyscale image with integer values indicating
        regions.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with each voxel value indicating the size of the
        region to which it belongs.  This is particularly useful for finding
        chord sizes on the image produced by ``apply_chords``.

    See Also
    --------
    flood
    """
    if im.dtype == bool:
        im = spim.label(im)[0]
    counts = np.bincount(im.flatten())
    counts[0] = 0
    chords = counts[im]
    return chords


def apply_chords(im, spacing=1, axis=0, trim_edges=True, label=False):
    r"""
    Adds chords to the void space in the specified direction.  The chords are
    separated by 1 voxel plus the provided spacing.

    Parameters
    ----------
    im : ND-array
        An image of the porous material with void marked as ``True``.

    spacing : int
        Separation between chords.  The default is 1 voxel.  This can be
        decreased to 0, meaning that the chords all touch each other, which
        automatically sets the ``label`` argument to ``True``.

    axis : int (default = 0)
        The axis along which the chords are drawn.

    trim_edges : bool (default = ``True``)
        Whether or not to remove chords that touch the edges of the image.
        These chords are artificially shortened, so they skew the chord length
        distribution.

    label : bool (default is ``False``)
        If ``True`` the chords in the returned image are each given a unique
        label, such that all voxels lying on the same chord have the same
        value.  This is automatically set to ``True`` if spacing is 0, but is
        ``False`` otherwise.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with non-zero values indicating the chords.

    See Also
    --------
    apply_chords_3D

    """
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))
    if spacing < 0:
        raise Exception("Spacing cannot be less than 0")
    if spacing == 0:
        label = True
    result = np.zeros(im.shape, dtype=int)  # Will receive chords at end
    slxyz = [slice(None, None, spacing * (axis != i) + 1) for i in [0, 1, 2]]
    slices = tuple(slxyz[: im.ndim])
    s = [[0, 1, 0], [0, 1, 0], [0, 1, 0]]  # Straight-line structuring element
    if im.ndim == 3:  # Make structuring element 3D if necessary
        s = np.pad(np.atleast_3d(s), pad_width=((0, 0), (0, 0), (1, 1)),
                   mode="constant", constant_values=0)
    im = im[slices]
    s = np.swapaxes(s, 0, axis)
    chords = spim.label(im, structure=s)[0]
    if trim_edges:  # Label on border chords will be set to 0
        chords = clear_border(chords)
    result[slices] = chords  # Place chords into empty image created at top
    if label is False:  # Remove label if not requested
        result = result > 0
    return result
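

# Usage sketch (comment only): drawing chords along the x-axis and converting
# them to a chord-length map with ``region_size``, as mentioned in that
# function's docstring.  The test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   chords = apply_chords(im, spacing=1, axis=0, trim_edges=True)
#   chord_lengths = region_size(chords)   # voxel value = length of its chord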


def apply_chords_3D(im, spacing=0, trim_edges=True):
    r"""
    Adds chords to the void space in all three principal directions.  The
    chords are separated by 1 voxel plus the provided spacing.  Chords in the
    X, Y and Z directions are labelled 1, 2 and 3 respectively.

    Parameters
    ----------
    im : ND-array
        A 3D image of the porous material with void space marked as True.

    spacing : int (default = 0)
        Chords are automatically separated by 1 voxel on all sides, and this
        argument increases the separation.

    trim_edges : bool (default is ``True``)
        Whether or not to remove chords that touch the edges of the image.
        These chords are artificially shortened, so they skew the chord length
        distribution

    Returns
    -------
    image : ND-array
        A copy of ``im`` with values of 1 indicating x-direction chords,
        2 indicating y-direction chords, and 3 indicating z-direction chords.

    Notes
    -----
    The chords are separated by a spacing of at least 1 voxel so that tools
    that search for connected components, such as ``scipy.ndimage.label``, can
    detect individual chords.

    See Also
    --------
    apply_chords

    """
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))
    if im.ndim < 3:
        raise Exception("Must be a 3D image to use this function")
    if spacing < 0:
        raise Exception("Spacing cannot be less than 0")
    ch = np.zeros_like(im, dtype=int)
    ch[:, :: 4 + 2 * spacing, :: 4 + 2 * spacing] = 1       # X-direction
    ch[:: 4 + 2 * spacing, :, 2::4 + 2 * spacing] = 2     # Y-direction
    ch[2::4 + 2 * spacing, 2::4 + 2 * spacing, :] = 3   # Z-direction
    chords = ch * im
    if trim_edges:
        temp = clear_border(spim.label(chords > 0)[0]) > 0
        chords = temp * chords
    return chords


def local_thickness(im, sizes=25, mode="hybrid", **kwargs):
    r"""
    For each voxel, this function calculates the radius of the largest sphere
    that both engulfs the voxel and fits entirely within the foreground.

    This is not the same as a simple distance transform, which finds the
    largest sphere that could be *centered* on each voxel.

    Parameters
    ----------
    im : array_like
        A binary image with the phase of interest set to True

    sizes : array_like or scalar
        The sizes to invade.  If a list of values is provided they are used
        directly.  If a scalar is provided then that number of points spanning
        the min and max of the distance transform are used.

    mode : string
        Controls which method is used to compute the result.  Options are:

        'hybrid' - (default) Performs a distance transform of the void space,
        thresholds to find voxels larger than ``sizes[i]``, trims the resulting
        mask if ``access_limited`` is ``True``, then dilates it using the
        efficient fft-method to obtain the non-wetting fluid configuration.

        'dt' - Same as 'hybrid', except uses a second distance transform,
        relative to the thresholded mask, to find the invading fluid
        configuration.  The choice of 'dt' or 'hybrid' depends on speed, which
        is system and installation specific.

        'mio' - Uses a single morphological image opening step to obtain the
        invading fluid configuration directly, *then* trims if
        ``access_limited`` is ``True``.  This method is not ideal and is
        included mostly for comparison purposes.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with the pore size values in each voxel

    See Also
    --------
    porosimetry

    Notes
    -----
    The term *foreground* is used since this function can be applied to both
    pore space or the solid, whichever is set to ``True``.

    This function is identical to ``porosimetry`` with ``access_limited`` set
    to ``False``.

    The way local thickness is found in PoreSpy differs from the traditional
    method (i.e. `used in ImageJ <https://imagej.net/Local_Thickness>`_).
    Our approach is probably slower, but it allows for the same code to be
    used for ``local_thickness`` and ``porosimetry``, since we can 'trim'
    invaded regions that are not connected to the inlets in the ``porosimetry``
    function.  This is not needed in ``local_thickness`` however.

    """
    im_new = porosimetry(im=im, sizes=sizes, access_limited=False, mode=mode,
                         **kwargs)
    return im_new
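

# Usage sketch (comment only): computing a local thickness map and then a pore
# size distribution from it.  The test image and the use of
# ``porespy.metrics.pore_size_distribution`` are assumptions for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   lt = local_thickness(im, sizes=25)
#   psd = ps.metrics.pore_size_distribution(lt)  # assumed porespy.metrics API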


def porosimetry(im, sizes=25, inlets=None, access_limited=True, mode='hybrid',
                fft=True, **kwargs):
    r"""
    Performs a porosimetry simulation on an image

    Parameters
    ----------
    im : ND-array
        An ND image of the porous material containing ``True`` values in the
        pore space.

    sizes : array_like or scalar
        The sizes to invade.  If a list of values is provided they are used
        directly.  If a scalar is provided then that number of points spanning
        the min and max of the distance transform are used.

    inlets : ND-array, boolean
        A boolean mask with ``True`` values indicating where the invasion
        enters the image.  By default all faces are considered inlets,
        akin to a mercury porosimetry experiment.  Users can also apply
        solid boundaries to their image externally before passing it in,
        allowing for complex inlets like circular openings, etc.  This argument
        is only used if ``access_limited`` is ``True``.

    access_limited : Boolean
        This flag indicates if the intrusion should only occur from the
        surfaces (``access_limited`` is ``True``, which is the default), or
        if the invading phase should be allowed to appear in the core of
        the image.  The former simulates experimental tools like mercury
        intrusion porosimetry, while the latter is useful for comparison
        to gauge the extent of shielding effects in the sample.

    mode : string
        Controls which method is used to compute the result.  Options are:

        'hybrid' - (default) Performs a distance transform of the void space,
        thresholds to find voxels larger than ``sizes[i]``, trims the resulting
        mask if ``access_limited`` is ``True``, then dilates it using the
        efficient fft-method to obtain the non-wetting fluid configuration.

        'dt' - Same as 'hybrid', except uses a second distance transform,
        relative to the thresholded mask, to find the invading fluid
        configuration.  The choice of 'dt' or 'hybrid' depends on speed, which
        is system and installation specific.

        'mio' - Uses a single morphological image opening step to obtain the
        invading fluid configuration directly, *then* trims if
        ``access_limited`` is ``True``.  This method is not ideal and is
        included mostly for comparison purposes.  The morphological operations
        are done using fft-based method implementations.

    fft : boolean (default is ``True``)
        Indicates whether to use the ``fftmorphology`` function in
        ``porespy.filters`` or to use the standard morphology functions in
        ``scipy.ndimage``.  Always use ``fft=True`` unless you have a good
        reason not to.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with voxel values indicating the sphere radius at
        which it becomes accessible from the inlets.  This image can be used
        to find invading fluid configurations as a function of applied
        capillary pressure by applying a boolean comparison:
        ``inv_phase = im > r`` where ``r`` is the radius (in voxels) of the
        invading sphere.  Of course, ``r`` can be converted to capillary
        pressure using a preferred model.

    Notes
    -----
    There are many ways to perform this filter, and PoreSpy offers 3, which
    users can choose between via the ``mode`` argument.  These methods all
    work in a similar way by finding which foreground voxels can accommodate
    a sphere of a given radius, then repeating for smaller radii.

    See Also
    --------
    local_thickness

    """
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
        warnings.warn((
            f"Input image contains a singleton axis: {im.shape}."
            " Reduce dimensionality with np.squeeze(im) to avoid"
            " unexpected behavior."
        ))

    dt = edt(im > 0)

    if inlets is None:
        inlets = get_border(im.shape, mode="faces")

    if isinstance(sizes, int):
        sizes = np.logspace(start=np.log10(np.amax(dt)), stop=0, num=sizes)
    else:
        sizes = np.unique(sizes)[-1::-1]

    if im.ndim == 2:
        strel = ps_disk
    else:
        strel = ps_ball

    # Parse kwargs for any parallelization arguments
    parallel = kwargs.pop('parallel', False)
    cores = kwargs.pop('cores', None)
    divs = kwargs.pop('divs', 2)

    if mode == "mio":
        pw = int(np.floor(dt.max()))
        impad = np.pad(im, mode="symmetric", pad_width=pw)
        inlets = np.pad(inlets, mode="symmetric", pad_width=pw)
        # sizes = np.unique(np.around(sizes, decimals=0).astype(int))[-1::-1]
        imresults = np.zeros(np.shape(impad))
        pbar = tqdm(sizes, file=sys.stdout)
        for r in sizes:
            pbar.update()
            if parallel:
                imtemp = chunked_func(func=spim.binary_erosion,
                                      input=impad, structure=strel(r),
                                      overlap=int(2*r) + 1,
                                      cores=cores, divs=divs)
            elif fft:
                imtemp = fftmorphology(impad, strel(r), mode="erosion")
            else:
                imtemp = spim.binary_erosion(input=impad,
                                             structure=strel(r))
            if access_limited:
                imtemp = trim_disconnected_blobs(imtemp, inlets)
            if parallel:
                imtemp = chunked_func(func=spim.binary_dilation,
                                      input=imtemp, structure=strel(r),
                                      overlap=int(2*r) + 1,
                                      cores=cores, divs=divs)
            elif fft:
                imtemp = fftmorphology(imtemp, strel(r), mode="dilation")
            else:
                imtemp = spim.binary_dilation(input=imtemp,
                                              structure=strel(r))
            if np.any(imtemp):
                imresults[(imresults == 0) * imtemp] = r
        imresults = extract_subsection(imresults, shape=im.shape)
    elif mode == "dt":
        imresults = np.zeros(np.shape(im))
        pbar = tqdm(sizes, file=sys.stdout)
        for r in sizes:
            pbar.update()
            imtemp = dt >= r
            if access_limited:
                imtemp = trim_disconnected_blobs(imtemp, inlets)
            if np.any(imtemp):
                imtemp = edt(~imtemp) < r
                imresults[(imresults == 0) * imtemp] = r
    elif mode == "hybrid":
        imresults = np.zeros(np.shape(im))
        pbar = tqdm(sizes, file=sys.stdout)
        for r in sizes:
            pbar.update()
            imtemp = dt >= r
            if access_limited:
                imtemp = trim_disconnected_blobs(imtemp, inlets)
            if np.any(imtemp):
                if parallel:
                    imtemp = chunked_func(func=spim.binary_dilation,
                                          input=imtemp, structure=strel(r),
                                          overlap=int(2*r) + 1,
                                          cores=cores, divs=divs)
                elif fft:
                    imtemp = fftmorphology(imtemp, strel(r),
                                           mode="dilation")
                else:
                    imtemp = spim.binary_dilation(input=imtemp,
                                                  structure=strel(r))
                imresults[(imresults == 0) * imtemp] = r
    else:
        raise Exception("Unrecognized mode " + mode)
    return imresults
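

# Usage sketch (comment only): a drainage-style simulation with invasion from
# all faces, followed by a boolean threshold to recover the fluid
# configuration at a given sphere radius, as described in the Returns section
# above.  The test image is an assumption for illustration.
#
#   import porespy as ps
#   im = ps.generators.blobs(shape=[200, 200], porosity=0.6)
#   mip = porosimetry(im, sizes=25, access_limited=True, mode='hybrid')
#   inv_at_r5 = mip >= 5   # voxels invaded by spheres of radius 5 or larger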
1364

1365

1366 1
def trim_disconnected_blobs(im, inlets, strel=None):
1367
    r"""
1368
    Removes foreground voxels not connected to specified inlets
1369

1370
    Parameters
1371
    ----------
1372
    im : ND-array
1373
        The image containing the blobs to be trimmed
1374
    inlets : ND-array or tuple of indices
1375
        The locations of the inlets.  Can either be a boolean mask the same
1376
        shape as ``im``, or a tuple of indices such as that returned by the
1377
        ``where`` function.  Any voxels *not* connected directly to
1378
        the inlets will be trimmed.
1379
    strel : ND-array
1380
        The neighborhood over which connectivity should be checked. It must
1381
        be symmetric and the same dimensionality as the image.  It is passed
1382
        directly to the ``scipy.ndimage.label`` function as the ``structure``
1383
        argument so refer to that docstring for additional info.
1384

1385
    Returns
1386
    -------
1387
    image : ND-array
1388
        An array of the same shape as ``im``, but with all foreground
1389
        voxels not connected to the ``inlets`` removed.
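
    Examples
    --------
    A minimal usage sketch (this assumes ``porespy`` is importable as ``ps``
    and that this function is available as
    ``ps.filters.trim_disconnected_blobs``; the choice of inlet face is
    arbitrary):

    >>> import numpy as np
    >>> import porespy as ps
    >>> im = ps.generators.blobs(shape=[100, 100])
    >>> inlets = np.zeros_like(im, dtype=bool)
    >>> inlets[0, :] = True
    >>> im2 = ps.filters.trim_disconnected_blobs(im=im, inlets=inlets)
    >>> bool(im2.sum() <= im.sum())
    True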
1390
    """
1391 1
    if type(inlets) == tuple:
1392 0
        temp = np.copy(inlets)
1393 0
        inlets = np.zeros_like(im, dtype=bool)
1394 0
        inlets[temp] = True
1395 1
    elif (inlets.shape == im.shape) and (inlets.max() == 1):
1396 1
        inlets = inlets.astype(bool)
1397
    else:
1398 0
        raise Exception("inlets not valid, refer to docstring for info")
1399 1
    if strel is None:
        if im.ndim == 3:
            strel = cube(3)
        else:
            strel = square(3)
    labels = spim.label(inlets + (im > 0), structure=strel)[0]
1404 1
    keep = np.unique(labels[inlets])
1405 1
    keep = keep[keep > 0]
1406 1
    if len(keep) > 0:
1407 1
        im2 = np.reshape(np.in1d(labels, keep), newshape=im.shape)
1408
    else:
1409 0
        im2 = np.zeros_like(im)
1410 1
    im2 = im2 * im
1411 1
    return im2
1412

1413

1414 1
def _get_axial_shifts(ndim=2, include_diagonals=False):
1415
    r"""
1416
    Helper function to generate the axial shifts that will be performed on
1417
    the image to identify bordering pixels/voxels
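
    As a small sketch, in 2D without diagonals the four face-neighbour
    offsets are returned as a (4, 2) array (one row per axial shift):

    >>> shifts = _get_axial_shifts(ndim=2, include_diagonals=False)
    >>> shifts.shape
    (4, 2)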
1418
    """
1419 1
    if ndim == 2:
1420 1
        if include_diagonals:
1421 1
            neighbors = square(3)
1422
        else:
1423 1
            neighbors = diamond(1)
1424 1
        neighbors[1, 1] = 0
1425 1
        x, y = np.where(neighbors)
1426 1
        x -= 1
1427 1
        y -= 1
1428 1
        return np.vstack((x, y)).T
1429
    else:
1430 1
        if include_diagonals:
1431 1
            neighbors = cube(3)
1432
        else:
1433 1
            neighbors = octahedron(1)
1434 1
        neighbors[1, 1, 1] = 0
1435 1
        x, y, z = np.where(neighbors)
1436 1
        x -= 1
1437 1
        y -= 1
1438 1
        z -= 1
1439 1
        return np.vstack((x, y, z)).T
1440

1441

1442 1
def _make_stack(im, include_diagonals=False):
1443
    r"""
1444
    Creates a stack of images with one extra dimension compared to the input
1445
    image, with length equal to the number of borders to search + 1.
1446
    Image is rolled along the axial shifts so that the border pixel is
1447
    overlapping the original pixel. First image in stack is the original.
1448
    Stacking makes direct vectorized array comparisons possible.
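
    As a small sketch, for a 2D image without diagonal neighbours the stack
    gains a last axis of length 5 (the original image plus four shifted
    copies):

    >>> import numpy as np
    >>> im = np.arange(16).reshape(4, 4)
    >>> stack = _make_stack(im, include_diagonals=False)
    >>> stack.shape
    (4, 4, 5)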
1449
    """
1450 1
    ndim = len(np.shape(im))
1451 1
    axial_shift = _get_axial_shifts(ndim, include_diagonals)
1452 1
    if ndim == 2:
1453 1
        stack = np.zeros([np.shape(im)[0], np.shape(im)[1], len(axial_shift) + 1])
1454 1
        stack[:, :, 0] = im
1455 1
        for i in range(len(axial_shift)):
1456 1
            ax0, ax1 = axial_shift[i]
1457 1
            temp = np.roll(np.roll(im, ax0, 0), ax1, 1)
1458 1
            stack[:, :, i + 1] = temp
1459 1
        return stack
1460 1
    elif ndim == 3:
1461 1
        stack = np.zeros(
1462
            [np.shape(im)[0], np.shape(im)[1], np.shape(im)[2], len(axial_shift) + 1]
1463
        )
1464 1
        stack[:, :, :, 0] = im
1465 1
        for i in range(len(axial_shift)):
1466 1
            ax0, ax1, ax2 = axial_shift[i]
1467 1
            temp = np.roll(np.roll(np.roll(im, ax0, 0), ax1, 1), ax2, 2)
1468 1
            stack[:, :, :, i + 1] = temp
1469 1
        return stack
1470

1471

1472 1
def nphase_border(im, include_diagonals=False):
1473
    r"""
1474
    Identifies the voxels in regions that border *N* other regions.
1475

1476
    Useful for finding triple-phase boundaries.
1477

1478
    Parameters
1479
    ----------
1480
    im : ND-array
1481
        An ND image of the porous material containing discrete values in the
1482
        pore space identifying different regions. e.g. the result of a
1483
        SNOW partitioning
1484

1485
    include_diagonals : boolean
1486
        When identifying bordering pixels (2D) and voxels (3D) include those
1487
        shifted along more than one axis
1488

1489
    Returns
1490
    -------
1491
    image : ND-array
1492
        A copy of ``im`` with voxel values equal to the number of uniquely
1493
        different bordering values
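
    Examples
    --------
    A small sketch on a made-up three-phase image of vertical stripes; each
    voxel on a stripe interface borders exactly one other region, so the
    maximum count is 2 (this assumes ``porespy`` is importable as ``ps``):

    >>> import numpy as np
    >>> import porespy as ps
    >>> im = np.zeros([6, 6], dtype=int)
    >>> im[:, :2] = 1
    >>> im[:, 2:4] = 2
    >>> im[:, 4:] = 3
    >>> out = ps.filters.nphase_border(im)
    >>> int(out.max())
    2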
1494
    """
1495
    if im.ndim != im.squeeze().ndim:    # pragma: no cover
1496
        warnings.warn((
1497
            f"Input image conains a singleton axis: {im.shape}."
1498
            " Reduce dimensionality with np.squeeze(im) to avoid"
1499
            " unexpected behavior."
1500
        ))
1501
    # Get dimension of image
1502 1
    ndim = len(np.shape(im))
1503 1
    if ndim not in [2, 3]:
1504
        raise NotImplementedError("Function only works for 2d and 3d images")
1505
    # Pad image to handle edges
1506 1
    im = np.pad(im, pad_width=1, mode="edge")
1507
    # Stack rolled images for each neighbor to be inspected
1508 1
    stack = _make_stack(im, include_diagonals)
1509
    # Sort the stack along the last axis
1510 1
    stack.sort()
1511 1
    out = np.ones_like(im)
1512
    # Run through stack recording when neighbor id changes
1513
    # Number of changes is number of unique bordering regions
1514 1
    for k in range(np.shape(stack)[ndim])[1:]:
1515 1
        if ndim == 2:
1516 1
            mask = stack[:, :, k] != stack[:, :, k - 1]
1517 1
        elif ndim == 3:
1518 1
            mask = stack[:, :, :, k] != stack[:, :, :, k - 1]
1519 1
        out += mask
1520
    # Un-pad
1521 1
    if ndim == 2:
1522 1
        return out[1:-1, 1:-1].copy()
1523
    else:
1524 1
        return out[1:-1, 1:-1, 1:-1].copy()
1525

1526

1527 1
def prune_branches(skel, branch_points=None, iterations=1, **kwargs):
1528
    r"""
1529
    Removes all dangling ends or tails of a skeleton.
1530

1531
    Parameters
1532
    ----------
1533
    skel : ND-image
1534
        An image of a full or partial skeleton from which the tails should be
1535
        trimmed.
1536

1537
    branch_points : ND-image, optional
1538
        An image the same size ``skel`` with True values indicating the branch
1539
        points of the skeleton.  If this is not provided it is calculated
1540
        automatically.
1541

1542
    Returns
1543
    -------
1544
    An ND-image containing the skeleton with tails removed.
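
    Examples
    --------
    A minimal usage sketch (this assumes ``porespy`` is importable as ``ps``
    and that this function is available as ``ps.filters.prune_branches``):

    >>> import porespy as ps
    >>> from skimage.morphology import skeletonize
    >>> im = ps.generators.blobs(shape=[100, 100])
    >>> skel = skeletonize(im)
    >>> pruned = ps.filters.prune_branches(skel, iterations=1)
    >>> bool(pruned.sum() <= skel.sum())
    True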
1545

1546
    """
1547 1
    skel = skel > 0
1548 1
    if skel.ndim == 2:
1549 0
        from skimage.morphology import square as cube
1550
    else:
1551 1
        from skimage.morphology import cube
1552 1
    parallel = kwargs.pop('parallel', False)
1553 1
    divs = kwargs.pop('divs', 2)
1554 1
    cores = kwargs.pop('cores', None)
1555
    # Create empty image to house results
1556 1
    im_result = np.zeros_like(skel)
1557
    # If branch points are not supplied, attempt to find them
1558 1
    if branch_points is None:
1559 1
        branch_points = spim.convolve(skel * 1.0, weights=cube(3)) > 3
1560 1
        branch_points = branch_points * skel
1561
    # Store original branch points before dilating
1562 1
    pts_orig = branch_points
1563
    # Find arcs of skeleton by deleting branch points
1564 1
    arcs = skel * (~branch_points)
1565
    # Label arcs
1566 1
    arc_labels = spim.label(arcs, structure=cube(3))[0]
1567
    # Dilate branch points so they overlap with the arcs
1568 1
    if parallel:
1569 1
        branch_points = chunked_func(func=spim.binary_dilation,
1570
                                     input=branch_points, structure=cube(3),
1571
                                     overlap=3, divs=divs, cores=cores)
1572
    else:
1573 1
        branch_points = spim.binary_dilation(branch_points, structure=cube(3))
1574 1
    pts_labels = spim.label(branch_points, structure=cube(3))[0]
1575
    # Now scan through each arc to see if it's connected to two branch points
1576 1
    slices = spim.find_objects(arc_labels)
1577 1
    label_num = 0
1578 1
    for s in slices:
1579 0
        label_num += 1
1580
        # Find branch point labels that overlap the current arc
1581 0
        hits = pts_labels[s] * (arc_labels[s] == label_num)
1582
        # If image contains 2 branch points, then it's not a tail.
1583 0
        if len(np.unique(hits)) == 3:
1584 0
            im_result[s] += arc_labels[s] == label_num
1585
    # Add missing branch points back to arc image to make complete skeleton
1586 1
    im_result += skel * pts_orig
1587 1
    if iterations > 1:
1588 1
        iterations -= 1
1589 1
        im_temp = np.copy(im_result)
1590 1
        im_result = prune_branches(skel=im_result,
1591
                                   branch_points=None,
1592
                                   iterations=iterations,
1593
                                   parallel=parallel,
1594
                                   divs=divs, cores=cores)
1595 1
        if np.all(im_temp == im_result):
1596 1
            iterations = 0
1597 1
    return im_result
1598

1599

1600 1
def chunked_func(func,
1601
                 overlap=None,
1602
                 divs=2,
1603
                 cores=None,
1604
                 im_arg=["input", "image", "im"],
1605
                 strel_arg=["strel", "structure", "footprint"],
1606
                 **kwargs):
1607
    r"""
1608
    Performs the specified operation "chunk-wise" in parallel using dask
1609

1610
    This can be used to save memory by doing one chunk at a time (``cores=1``)
1611
    or to increase computation speed by spreading the work across multiple
1612
    cores (e.g. ``cores=8``)
1613

1614
    This function can be used with any operation that applies a structuring
1615
    element of some sort, since this implies that the operation is local
1616
    and can be chunked.
1617

1618
    Parameters
1619
    ----------
1620
    func : function handle
1621
        The function which should be applied to each chunk, such as
1622
        ``scipy.ndimage.binary_dilation``.
1623
    overlap : scalar or list of scalars, optional
1624
        The amount of overlap to include when dividing up the image.  This
1625
        value will almost always be the size (i.e. diameter) of the
1626
        structuring element. If not specified then the amount of overlap is
1627
        inferred from the size of the structuring element, in which case the
1628
        ``strel_arg`` must be specified.
1629
    divs : scalar or list of scalars (default = [2, 2, 2])
1630
        The number of chunks to divide the image into in each direction.  The
1631
        default is 2 chunks in each direction, resulting in a quartering of
1632
        the image and 8 total chunks (in 3D).  A scalar is interpreted as
1633
        applying to all directions, while a list of scalars is interpreted
1634
        as applying to each individual direction.
1635
    cores : scalar
1636
        The number of cores which should be used.  By default, all cores will
1637
        be used, or as many are needed for the given number of chunks, which
1638
        ever is smaller.
1639
    im_arg : string
1640
        The keyword used by ``func`` for the image to be operated on.  By
1641
        default this function will look for ``image``, ``input``, and ``im``
1642
        which are commonly used by *scipy.ndimage* and *skimage*.
1643
    strel_arg : string
1644
        The keyword used by ``func`` for the structuring element to apply.
1645
        This is only needed if ``overlap`` is not specified. By default this
1646
        function will look for ``strel``, ``structure``, and ``footprint``
1647
        which are commonly used by *scipy.ndimage* and *skimage*.
1648
    kwargs : additional keyword arguments
1649
        All other arguments are passed to ``func`` as keyword arguments. Note
1650
        that PoreSpy will fetch the image from this list of keywords using the
1651
        value provided to ``im_arg``.
1652

1653
    Returns
1654
    -------
1655
    result : ND-image
1656
        An image the same size as the input image, with the specified filter
1657
        applied as though done on a single large image.  There should be *no*
1658
        difference.
1659

1660
    Notes
1661
    -----
1662
    This function divides the image into the specified number of chunks, but
1663
    also applies a padding to each chunk to create an overlap with neighboring
1664
    chunks.  This way the operation does not have any edge artifacts. The
1665
    amount of padding is usually equal to the radius of the structuring
1666
    element but some functions do not use one, such as the distance transform
1667
    and Gaussian blur.  In these cases the user can specify ``overlap``.
1668

1669
    See Also
1670
    --------
1671
    scikit-image.util.apply_parallel
1672

1673
    Examples
1674
    --------
1675
    >>> import scipy.ndimage as spim
1676
    >>> import porespy as ps
1677
    >>> from skimage.morphology import ball
1678
    >>> im = ps.generators.blobs(shape=[100, 100, 100])
1679
    >>> f = spim.binary_dilation
1680
    >>> im2 = ps.filters.chunked_func(func=f, overlap=7, im_arg='input',
1681
    ...                               input=im, structure=ball(3), cores=1)
1682
    >>> im3 = spim.binary_dilation(input=im, structure=ball(3))
1683
    >>> np.all(im2 == im3)
1684
    True
1685

1686
    """
1687

1688 1
    @dask.delayed
1689
    def apply_func(func, **kwargs):
1690
        # Apply function on sub-slice of overall image
1691 1
        return func(**kwargs)
1692

1693
    # Import the array_split methods
1694 1
    from array_split import shape_split, ARRAY_BOUNDS
1695

1696
    # Determine the value for im_arg
1697 1
    if type(im_arg) == str:
1698 1
        im_arg = [im_arg]
1699 1
    for item in im_arg:
1700 1
        if item in kwargs.keys():
1701 1
            im = kwargs[item]
1702 1
            im_arg = item
1703 1
            break
1704
    # Fetch image from the kwargs dict
1705 1
    im = kwargs[im_arg]
1706
    # Determine the number of divisions to create
1707 1
    divs = np.ones((im.ndim,), dtype=int) * np.array(divs)
1708
    # If overlap given then use it, otherwise search for strel in kwargs
1709 1
    if overlap is not None:
1710 1
        halo = overlap * (divs > 1)
1711
    else:
1712 1
        if type(strel_arg) == str:
1713 1
            strel_arg = [strel_arg]
1714 1
        for item in strel_arg:
1715 1
            if item in kwargs.keys():
1716 1
                strel = kwargs[item]
1717 1
                break
1718 1
        halo = np.array(strel.shape) * (divs > 1)
1719 1
    slices = np.ravel(
1720
        shape_split(
1721
            im.shape, axis=divs, halo=halo.tolist(), tile_bounds_policy=ARRAY_BOUNDS
1722
        )
1723
    )
1724
    # Apply func to each subsection of the image
1725 1
    res = []
1726
    # print('Image will be broken into the following chunks:')
1727 1
    for s in slices:
1728
        # Extract subsection from image and input into kwargs
1729 1
        kwargs[im_arg] = im[tuple(s)]
1730
        # print(kwargs[im_arg].shape)
1731 1
        res.append(apply_func(func=func, **kwargs))
1732
    # Have dask actually compute the function on each subsection in parallel
1733
    # with ProgressBar():
1734
    #    ims = dask.compute(res, num_workers=cores)[0]
1735 1
    ims = dask.compute(res, num_workers=cores)[0]
1736
    # Finally, put the pieces back together into a single master image, im2
1737 1
    im2 = np.zeros_like(im, dtype=im.dtype)
1738 1
    for i, s in enumerate(slices):
1739
        # Prepare new slice objects into main and sub-sliced image
1740 1
        a = []  # Slices into main image
1741 1
        b = []  # Slices into chunked image
1742 1
        for dim in range(im.ndim):
1743 1
            if s[dim].start == 0:
1744 1
                ax = bx = 0
1745
            else:
1746 1
                ax = s[dim].start + halo[dim]
1747 1
                bx = halo[dim]
1748 1
            if s[dim].stop == im.shape[dim]:
1749 1
                ay = by = im.shape[dim]
1750
            else:
1751 1
                ay = s[dim].stop - halo[dim]
1752 1
                by = s[dim].stop - s[dim].start - halo[dim]
1753 1
            a.append(slice(ax, ay, None))
1754 1
            b.append(slice(bx, by, None))
1755
        # Convert lists of slices to tuples
1756 1
        a = tuple(a)
1757 1
        b = tuple(b)
1758
        # Insert image chunk into main image
1759 1
        im2[a] = ims[i][b]
1760 1
    return im2
1761

1762

1763 1
def snow_partitioning_parallel(im,
1764
                               overlap='dt',
1765
                               divs=2,
1766
                               mode='parallel',
1767
                               num_workers=None,
1768
                               crop=True,
1769
                               zoom_factor=0.5,
1770
                               r_max=5,
1771
                               sigma=0.4,
1772
                               return_all=False):
1773
    r"""
1774
    Perform the SNOW algorithm in parallel or serial mode to reduce time or
    memory usage respectively, by geometric domain decomposition of a large
    image.
1777

1778
    Parameters
1779
    ----------
1780
    im: ND_array
1781
        A binary image of porous media with 'True' values indicating phase of
1782
        interest
1783

1784
    overlap: float or int or str
1785
        Overlapping thickness between two subdomains that is used to merge
1786
        watershed segmented regions at intersection of two or more subdomains.
1787
        If 'dt' the overlap will be calculated based on maximum
1788
        distance transform in the whole image.
1789
        If 'ws' the overlap will be calculated by finding the maximum dimension
1790
        of the bounding box of the largest segmented region. The image is scaled
1791
        down by the 'zoom_factor' provided by the user.
1792
        If a real number is provided then this value will be used as the
1793
        overlapping thickness.
1794

1795
    divs: list or int
1796
        Number of domains into which each axis will be divided.
1797
        If a scalar is provided then it will be assigned to all axes.
1798
        If a list is provided then each respective axis will be divided by its
1799
        corresponding number in the list. For example [2, 3, 4] will divide
1800
        the z, y and x axes into 2, 3, and 4 domains respectively.
1801

1802
    mode: str
1803
        If 'parallel' then all subdomains will be processed simultaneously
        using the number of cores given by ``num_workers``.
        If 'serial' then the subdomains will be processed one at a time on a
        single core.
1807

1808
    num_workers: int or None
1809
        Number of cores that will be used to parallel process all domains.
1810
        If None then all cores will be used, but the user can specify any
1811
        integer value to control the memory usage.
1812

1813
    crop: bool
1814
        If True the image is cropped to fit the specified divisions.
1815

1816
    zoom_factor: float or int
1817
        The amount of zoom applied to the image to find the overlap thickness when
1818
        using the "ws" overlap mode.
1819

1820
    return_all : boolean
1821
        If set to ``True`` a named tuple is returned containing the original
1822
        image, the distance transform, and the final
1823
        pore regions.  The default is ``False``
1824

1825
    Returns
1826
    ----------
1827
    regions: ND_array
1828
        Partitioned image of segmented regions with unique labels. Each
        region corresponds to a pore body, while the intersection between
        two regions corresponds to a throat area.
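
    Examples
    --------
    A rough usage sketch, shown as a plain code block rather than a doctest
    because this function prints progress information (it assumes
    ``porespy`` is importable as ``ps``)::

        import porespy as ps
        im = ps.generators.blobs(shape=[200, 200, 200])
        regions = ps.filters.snow_partitioning_parallel(
            im, overlap='dt', divs=2, mode='parallel', num_workers=4)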
1831
    """
1832
    # --------------------------------------------------------------------------
1833
    # Adjust image shape according to specified dimension
1834 1
    tup = namedtuple("results", field_names=["im", "dt", "regions"])
1835 1
    if isinstance(divs, int):
1836 0
        divs = [divs for i in range(im.ndim)]
1837 1
    shape = []
1838 1
    for i in range(im.ndim):
1839 1
        shape.append(divs[i] * (im.shape[i] // divs[i]))
1840

1841 1
    if shape != list(im.shape):
1842 0
        if crop:
1843 0
            for i in range(im.ndim):
1844 0
                im = im.swapaxes(0, i)
1845 0
                im = im[:shape[i], ...]
1846 0
                im = im.swapaxes(i, 0)
1847 0
            print(f'Image is cropped to shape {shape}.')
1848
        else:
1849 0
            print('-' * 80)
1850 0
            print(f"Possible image shape for specified divisions is {shape}.")
1851 0
            print("To crop the image please set crop argument to 'True'.")
1852 0
            return
1853
    # --------------------------------------------------------------------------
1854
    # Get overlap thickness from distance transform
1855 1
    chunk_shape = (np.array(shape) / np.array(divs)).astype(int)
1856 1
    print('# Beginning parallel SNOW algorithm...')
1857 1
    print('=' * 80)
1858 1
    print('Calculating overlap thickness')
1859 1
    if overlap == 'dt':
1860 1
        dt = edt((im > 0), parallel=0)
1861 1
        overlap = dt.max()
1862 1
    elif overlap == 'ws':
1863 1
        rev = spim.interpolation.zoom(im, zoom=zoom_factor, order=0)
1864 1
        rev = rev > 0
1865 1
        dt = edt(rev, parallel=0)
1866 1
        rev_snow = snow_partitioning(rev, dt=dt, r_max=r_max, sigma=sigma)
1867 1
        labels, counts = np.unique(rev_snow, return_counts=True)
1868 1
        node = np.where(counts == counts[1:].max())[0][0]
1869 1
        slices = spim.find_objects(rev_snow)
1870 1
        overlap = max(rev_snow[slices[node - 1]].shape) / (zoom_factor * 2.0)
1871 1
        dt = edt((im > 0), parallel=0)
1872
    else:
1873 0
        overlap = overlap / 2.0
1874 0
        dt = edt((im > 0), parallel=0)
1875 1
    print('Overlap Thickness: ' + str(int(2.0 * overlap)) + ' voxels')
1876
    # --------------------------------------------------------------------------
1877
    # Get overlap and trim depth of all image dimension
1878 1
    depth = {}
1879 1
    trim_depth = {}
1880 1
    for i in range(im.ndim):
1881 1
        depth[i] = int(2.0 * overlap)
1882 1
        trim_depth[i] = int(2.0 * overlap) - 1
1883

1884 1
    tup.im = im
1885 1
    tup.dt = dt
1886
    # --------------------------------------------------------------------------
1887
    # Applying snow to image chunks
1888 1
    im = da.from_array(dt, chunks=chunk_shape)
1889 1
    im = da.overlap.overlap(im, depth=depth, boundary='none')
1890 1
    im = im.map_blocks(chunked_snow, r_max=r_max, sigma=sigma)
1891 1
    im = da.overlap.trim_internal(im, trim_depth, boundary='none')
1892 1
    if mode == 'serial':
1893 0
        num_workers = 1
1894 1
    elif mode == 'parallel':
1895 1
        num_workers = num_workers
1896
    else:
1897 0
        raise Exception('Mode of operation can either be parallel or serial')
1898 1
    with ProgressBar():
1899
        # print('-' * 80)
1900 1
        print('Applying snow to image chunks')
1901 1
        regions = im.compute(num_workers=num_workers)
1902
    # --------------------------------------------------------------------------
1903
    # Relabelling watershed chunks
1904
    # print('-' * 80)
1905 1
    print('Relabelling watershed chunks')
1906 1
    regions = relabel_chunks(im=regions, chunk_shape=chunk_shape)
1907
    # --------------------------------------------------------------------------
1908
    # Stitching watershed chunks
1909
    # print('-' * 80)
1910 1
    print('Stitching watershed chunks')
1911 1
    regions = watershed_stitching(im=regions, chunk_shape=chunk_shape)
1912 1
    print('=' * 80)
1913 1
    if return_all:
1914 1
        tup.regions = regions
1915 1
        return tup
1916
    else:
1917 0
        return regions
1918

1920

1921

1922 1
def chunked_snow(im, r_max=5, sigma=0.4):
1923
    r"""
1924
    Partitions the void space into pore regions using a marker-based watershed
1925
    algorithm, with specially filtered peaks as markers.
1926

1927
    The SNOW network extraction algorithm (Sub-Network of an Over-segmented
1928
    Watershed) was designed to handle the peculiarities of high-porosity
1929
    materials, but it applies well to other materials as well.
1930

1931
    Parameters
1932
    ----------
1933
    im : array_like
1934
        Distance transform of phase of interest in a binary image
1935
    r_max : int
1936
        The radius of the spherical structuring element to use in the Maximum
1937
        filter stage that is used to find peaks.  The default is 5
1938
    sigma : float
1939
        The standard deviation of the Gaussian filter used in step 1.  The
1940
        default is 0.4.  If 0 is given then the filter is not applied, which is
1941
        useful if a distance transform is supplied as the ``im`` argument that
1942
        has already been processed.
1943

1944
    Returns
1945
    -------
1946
    image : ND-array
1947
        An image the same shape as ``im`` with the void space partitioned into
1948
        pores using a marker based watershed with the peaks found by the
1949
        SNOW algorithm [1].
1950

1951
    References
1952
    ----------
1953
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
1954
    using marker-based watershed segmentation".  Physical Review E. (2017)
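
    Examples
    --------
    A rough sketch of applying this function directly to a full (unchunked)
    distance transform; within ``snow_partitioning_parallel`` it is applied
    per-chunk through dask's ``map_blocks`` (this assumes ``porespy`` is
    importable as ``ps``):

    >>> import porespy as ps
    >>> from edt import edt
    >>> im = ps.generators.blobs(shape=[100, 100])
    >>> regions = chunked_snow(edt(im), r_max=5, sigma=0.4)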
1955
    """
1956

1957 1
    dt = spim.gaussian_filter(input=im, sigma=sigma)
1958 1
    peaks = find_peaks(dt=dt, r_max=r_max)
1959 1
    peaks = trim_saddle_points(peaks=peaks, dt=dt, max_iters=99, verbose=0)
1960 1
    peaks = trim_nearby_peaks(peaks=peaks, dt=dt)
1961 1
    peaks, N = spim.label(peaks)
1962 1
    regions = watershed(image=-dt, markers=peaks, mask=im > 0)
1963

1964 1
    return regions * (im > 0)
1965

1966

1967 1
def pad(im, pad_width=1, constant_value=0):
1968
    r"""
1969
    Pad the image with a constant value and width.
1970

1971
    Parameters
1972
    ----------
1973
    im : ND-array
1974
        The image that requires padding
1975
    pad_width : int
1976
        The number of voxels that will be padded onto each edge. The default value
1977
        is 1.
1978
    constant_value : int
1979
        Pads with the specified constant value
1980

1981
    Returns
1982
    -------
1983
    output: ND-array
1984
        Padded image with the same dimensionality as the provided image
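
    Examples
    --------
    A small sketch, padding a 2D array by one voxel of zeros on every side:

    >>> import numpy as np
    >>> im = np.ones([3, 3], dtype=int)
    >>> padded = pad(im, pad_width=1, constant_value=0)
    >>> padded.shape
    (5, 5)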
1985
    """
1986 1
    shape = np.array(im.shape)
1987 1
    pad_shape = shape + (2 * pad_width)
1988 1
    temp = np.zeros(pad_shape, dtype=np.uint32)
1989 1
    if constant_value != 0:
1990 0
        temp = temp + constant_value
1991 1
    if im.ndim == 3:
1992 0
        temp[pad_width: -pad_width,
1993
             pad_width: -pad_width,
1994
             pad_width: -pad_width] = im
1995 1
    elif im.ndim == 2:
1996 1
        temp[pad_width: -pad_width,
1997
             pad_width: -pad_width] = im
1998
    else:
1999 0
        temp[pad_width: -pad_width] = im
2000

2001 1
    return temp
2002

2003

2004
def relabel_chunks(im, chunk_shape):  # pragma: no cover
2005
    r"""
2006
    Assign new labels to each chunk or sub-domain of the actual image. This
2007
    prevents two or more regions from having the same label.
2008

2009
    Parameters
2010
    -----------
2011

2012
    im: ND-array
2013
        Actual image that contains repeating labels in chunks or sub-domains
2014

2015
    chunk_shape: tuple
2016
        The shape of the chunk that will be relabeled in the actual image.
        Note that the actual image shape should be a multiple of the chunk
        shape, otherwise some labels will not be relabeled.
2019

2020
    Returns
2021
    -------
2022
    output : ND-array
2023
        Relabeled image with unique label assigned to each region.
2024
    """
2025
    im = pad(im, pad_width=1)
2026
    im_shape = np.array(im.shape, dtype=np.uint32)
2027
    max_num = 0
2028
    c = np.array(chunk_shape, dtype=np.uint32) + 2
2029
    num = (im_shape / c).astype(int)
2030

2031
    if im.ndim == 3:
2032
        for z in range(num[0]):
2033
            for y in range(num[1]):
2034
                for x in range(num[2]):
2035
                    chunk = im[z * c[0]: (z + 1) * c[0],
2036
                               y * c[1]: (y + 1) * c[1],
2037
                               x * c[2]: (x + 1) * c[2]]
2038
                    chunk += max_num
2039
                    chunk[chunk == max_num] = 0
2040
                    max_num = chunk.max()
2041
                    im[z * c[0]: (z + 1) * c[0],
2042
                       y * c[1]: (y + 1) * c[1],
2043
                       x * c[2]: (x + 1) * c[2]] = chunk
2044
    else:
2045
        for y in range(num[0]):
2046
            for x in range(num[1]):
2047
                chunk = im[y * c[0]: (y + 1) * c[0],
2048
                           x * c[1]: (x + 1) * c[1]]
2049
                chunk += max_num
2050
                chunk[chunk == max_num] = 0
2051
                max_num = chunk.max()
2052
                im[y * c[0]: (y + 1) * c[0],
2053
                   x * c[1]: (x + 1) * c[1]] = chunk
2054

2055
    return im
2056

2057

2058
def trim_internal_slice(im, chunk_shape):  # pragma: no cover
2059
    r"""
2060
    Delete extra slices from image that were used to stitch two or more chunks
2061
    together.
2062

2063
    Parameters
2064
    -----------
2065

2066
    im :  ND-array
2067
        Image that contains extra slices in the x, y, z directions.
2068

2069
    chunk_shape : tuple
2070
        The shape of the chunk from which image is subdivided.
2071

2072
    Returns
2073
    -------
2074
    output :  ND-array
2075
        Image without extra internal slices. The shape of the image will be
2076
        the same as the input image provided for watershed segmentation.
2077
    """
2078
    im_shape = np.array(im.shape, dtype=np.uint32)
2079
    c1 = np.array(chunk_shape, dtype=np.uint32) + 2
2080
    c2 = np.array(chunk_shape, dtype=np.uint32)
2081
    num = (im_shape / c1).astype(int)
2082
    out_shape = num * c2
2083
    out = np.empty((out_shape), dtype=np.uint32)
2084

2085
    if im.ndim == 3:
2086
        for z in range(num[0]):
2087
            for y in range(num[1]):
2088
                for x in range(num[2]):
2089
                    chunk = im[z * c1[0]: (z + 1) * c1[0],
2090
                               y * c1[1]: (y + 1) * c1[1],
2091
                               x * c1[2]: (x + 1) * c1[2]]
2092

2093
                    out[z * c2[0]: (z + 1) * c2[0],
2094
                        y * c2[1]: (y + 1) * c2[1],
2095
                        x * c2[2]: (x + 1) * c2[2]] = chunk[1:-1, 1:-1, 1:-1]
2096
    else:
2097
        for y in range(num[0]):
2098
            for x in range(num[1]):
2099
                chunk = im[y * c1[0]: (y + 1) * c1[0],
2100
                           x * c1[1]: (x + 1) * c1[1]]
2101

2102
                out[y * c2[0]: (y + 1) * c2[0],
2103
                    x * c2[1]: (x + 1) * c2[1]] = chunk[1:-1, 1:-1]
2104

2105
    return out
2106

2107

2108 1
def watershed_stitching(im, chunk_shape):
2109
    r"""
2110
    Stitch individual sub-domains of watershed segmentation into one big
2111
    segmentation with all boundary labels of each sub-domain relabeled to merge
2112
    boundary regions.
2113

2114
    Parameters
2115
    -----------
2116
    im : ND-array
2117
        An image on which watershed segmentation has been performed on all
        sub-domains individually.
2119

2120
    chunk_shape: tuple
2121
        The shape of the sub-domain in which image segmentation is performed.
2122

2123
    Returns
2124
    -------
2125
    output : ND-array
2126
        Stitched watershed segmentation with all sub-domains merged to form a
2127
        single watershed segmentation.
2128
    """
2129

2130 1
    c_shape = np.array(chunk_shape)
2131 1
    cuts_num = (np.array(im.shape) / c_shape).astype(np.uint32)
2132

2133 1
    for axis, num in enumerate(cuts_num):
2134 1
        keys = []
2135 1
        values = []
2136 1
        if num > 1:
2137 1
            im = im.swapaxes(0, axis)
2138 1
            for i in range(1, num):
2139 1
                sl = i * (chunk_shape[axis] + 3) - (i - 1)
2140 1
                sl1 = im[sl - 3, ...]
2141 1
                sl1_mask = sl1 > 0
2142 1
                sl2 = im[sl - 1, ...] * sl1_mask
2143 1
                sl1_labels = sl1.flatten()[sl1.flatten() > 0]
2144 1
                sl2_labels = sl2.flatten()[sl2.flatten() > 0]
2145 1
                if sl1_labels.size != sl2_labels.size:
2146 0
                    raise Exception('The selected overlapping thickness is not '
2147
                                    'suitable for input image. Change '
2148
                                    'overlapping criteria '
2149
                                    'or manually input value.')
2150 1
                keys.append(sl1_labels)
2151 1
                values.append(sl2_labels)
2152 1
            im = replace_labels(array=im, keys=keys, values=values)
2153 1
            im = im.swapaxes(axis, 0)
2154 1
    im = trim_internal_slice(im=im, chunk_shape=chunk_shape)
2155 1
    im = resequence_labels(array=im)
2156

2157 1
    return im
2158

2159

2160 1
@njit(parallel=True)
2161
def copy(im, output):  # pragma: no cover
2162
    r"""
2163
    This function copies the input array into an output array that is
    allocated in a different memory space. This is a numba version of
    numpy's copy function. Because each element is copied in parallel,
    this implementation is faster than the numpy version.
2167

2168
    Parameters
2169
    ----------
2170
    im: ND-array
        Array that needs to be copied
    output: ND-array
        Pre-allocated array, of the same shape as ``im``, into which the
        values are copied
2172

2173
    Returns
2174
    -------
2175
    output: ND-array
2176
        Copied array
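
    Examples
    --------
    A small sketch, copying into a pre-allocated array of the same shape and
    dtype:

    >>> import numpy as np
    >>> a = np.arange(6).reshape(2, 3)
    >>> b = copy(a, np.empty_like(a))
    >>> b.tolist()
    [[0, 1, 2], [3, 4, 5]]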
2177
    """
2178

2179
    if im.ndim == 3:
2180
        for i in prange(im.shape[0]):
2181
            for j in prange(im.shape[1]):
2182
                for k in prange(im.shape[2]):
2183
                    output[i, j, k] = im[i, j, k]
2184
    elif im.ndim == 2:
2185
        for i in prange(im.shape[0]):
2186
            for j in prange(im.shape[1]):
2187
                output[i, j] = im[i, j]
2188
    else:
2189
        for i in prange(im.shape[0]):
2190
            output[i] = im[i]
2191

2192
    return output
2193

2194

2195 1
@njit(parallel=True)
2196
def _replace(array, keys, values, ind_sort):  # pragma: no cover
2197
    r"""
2198
    This function replaces ``keys`` elements in the input array with the
    corresponding ``values`` elements. It is used internally by
    ``replace_labels``.
2200

2201
    Parameters
2202
    ----------
2203
    array : ND-array
2204
        Array which requires replacing labels
2205
    keys :  1D-array
2206
        The unique labels that need to be replaced
2207
    values : 1D-array
2208
        The unique values that will be assigned to labels
    ind_sort : 1D-array
        The indices that sort ``keys``, as produced by ``np.argsort(keys)``
2209

2210
    Returns
2211
    -------
2212
    array : ND-array
2213
        Array with replaced labels.
2214
    """
2215
    # ind_sort = np.argsort(keys)
2216
    keys_sorted = keys[ind_sort]
2217
    values_sorted = values[ind_sort]
2218
    s_keys = set(keys)
2219

2220
    for i in prange(array.shape[0]):
2221
        if array[i] in s_keys:
2222
            ind = np.searchsorted(keys_sorted, array[i])
2223
            array[i] = values_sorted[ind]
2224

2225

2226 1
def replace_labels(array, keys, values):
2227
    r"""
2228
    Replace the labels in ``array`` given in ``keys`` with the corresponding
    ``values``.
2229

2230
    Parameters
2231
    ----------
2232
    array : ND-array
2233
        Array which requires replacing labels
2234
    keys :  1D-array
2235
        The unique labels that need to be replaced
2236
    values : 1D-array
2237
        The unique values that will be assigned to labels
2238

2239
    Returns
2240
    -------
2241
    array : ND-array
2242
        Array with replaced labels.
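
    Examples
    --------
    A small sketch, replacing labels 1 and 3 with 4 and 5 respectively (the
    keys and values are passed as lists of arrays, which are concatenated
    internally):

    >>> import numpy as np
    >>> im = np.array([[1, 2], [3, 1]])
    >>> out = replace_labels(im, keys=[np.array([1, 3])],
    ...                      values=[np.array([4, 5])])
    >>> out.tolist()
    [[4, 2], [5, 4]]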
2243
    """
2244 1
    a_shape = array.shape
2245 1
    array = array.flatten()
2246 1
    keys = np.concatenate(keys, axis=0)
2247 1
    values = np.concatenate(values, axis=0)
2248 1
    ind_sort = np.argsort(keys)
2249 1
    _replace(array, keys, values, ind_sort)
2250

2251 1
    return array.reshape(a_shape)
2252

2253

2254 1
@njit()
2255
def _sequence(array, count):  # pragma: no cover
2256
    r"""
2257
    Internal function of the ``resequence_labels`` method. This function
    resequences array elements in ascending order using numba, which is many
    times faster than the ``make_contiguous`` function.
2260

2261
    Parameters
2262
    ----------
2263
    array: 1d-array
2264
        1d-array that needs resequencing
2265
    count: 1d-array
2266
        1d-array of zeros having same size as array
2267

2268
    Returns
2269
    -------
2270
    array: 1d-array
2271
        The input array with elements resequenced in ascending order
2272
    Note: The output of this function is not the same as ``make_contiguous``
    or the ``relabel_sequential`` function of scikit-image. This function
    resequences and effectively shuffles the region labels, while the other
    methods only resequence and output a sorted array.
2276
    """
2277
    a = 1
2278
    i = 0
2279
    while i < (len(array)):
2280
        data = array[i]
2281
        if data != 0:
2282
            if count[data] == 0:
2283
                count[data] = a
2284
                a += 1
2285
        array[i] = count[data]
2286
        i += 1
2287

2288

2289 1
@njit(parallel=True)
2290
def amax(array):  # pragma: no cover
2291
    r"""
2292
    Find the largest element in an array using a fast parallel numba technique
2293

2294
    Parameters
2295
    ----------
2296
    array: ND-array
2297
        Array in which the largest element needs to be found
2298

2299
    Returns
2300
    scalar: float or int
2301
        The largest element value in the input array
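
    Examples
    --------
    A small sketch:

    >>> import numpy as np
    >>> int(amax(np.array([3, 7, 2])))
    7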
2302
    """
2303

2304
    return np.max(array)
2305

2306

2307 1
def resequence_labels(array):
2308
    r"""
2309
    Resequence the labels to make them contiguous.
2310

2311
    Parameters
2312
    ----------
2313
    array: ND-array
2314
        Array that requires resequencing
2315

2316
    Returns
2317
    -------
2318
    array : ND-array
2319
        Resequenced array with the same shape as the input array
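
    Examples
    --------
    A small sketch, mapping the labels {0, 4, 7} onto {0, 1, 2} in their
    order of first appearance (note that the input array is modified
    in-place as well):

    >>> import numpy as np
    >>> im = np.array([0, 4, 4, 7, 0, 7])
    >>> resequence_labels(im).tolist()
    [0, 1, 1, 2, 0, 2]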
2320
    """
2321 1
    a_shape = array.shape
2322 1
    array = array.ravel()
2323 1
    max_num = amax(array) + 1
2324 1
    count = np.zeros(max_num, dtype=np.uint32)
2325 1
    _sequence(array, count)
2326

2327 1
    return array.reshape(a_shape)
