#46 Add persistence landscape functionality

Open · calderds


persim/PersLandscapeExact.py

@@ -0,0 +1,460 @@
"""
Define Persistence Landscape Exact class.
"""
import itertools
import numpy as np
from operator import itemgetter
from .landscape_auxiliary import union_crit_pairs
from .PersLandscape import PersLandscape
from .PersLandscapeGrid import PersLandscapeGrid


class PersLandscapeExact(PersLandscape):
    """Persistence Landscape Exact class.

    This class implements an exact version of persistence landscapes. The
    landscape functions are stored as a list of critical pairs, and the actual
    function is the linear interpolation of these critical pairs. All
    computations done with this class are exact. For much faster but
    approximate methods that should suffice for most applications, consider
    `PersLandscapeGrid`.

    Parameters
    ----------
    dgms : list of numpy arrays, optional
        A nested list of numpy arrays, e.g., [array( array([:]), array([:]) ), ..., array()].
        Each entry in the list corresponds to a single homological degree, and
        each array holds the birth-death pairs in that degree. This is the
        output format of ripser.py: ripser(data_user)['dgms']. Only one of
        `dgms` or `critical_pairs` should be specified.

    homological_degree : int
        The homological degree of the persistence diagram.

    critical_pairs : list, optional
        A list of lists of critical pairs (points, values) specifying a
        persistence landscape. These do not necessarily have to arise from a
        persistence diagram. Only one of `dgms` or `critical_pairs` should be
        specified.

    compute : bool, optional
        A flag determining whether the landscape functions are computed upon
        instantiation.

    Methods
    -------
    compute_landscape : computes the set of critical pairs and stores them in
        the attribute `critical_pairs`

    compute_landscape_by_depth : returns the critical pairs of the landscape
        function at a given depth

    p_norm : returns the p-norm of a landscape

    sup_norm : returns the sup norm of a landscape
    """

    def __init__(
            self, dgms: list = [], homological_degree: int = 0,
            critical_pairs: list = [], compute: bool = False) -> None:
        super().__init__(dgms=dgms, homological_degree=homological_degree)
        self.critical_pairs = critical_pairs
        if dgms:
            self.dgms = dgms[self.homological_degree]
        else:  # critical pairs were passed. Is this the best check for this?
            self.dgms = dgms
        self.max_depth = len(self.critical_pairs)
        if compute:
            self.compute_landscape()

    def __repr__(self):
        return (
            "The persistence landscape of diagrams in homological "
            f"degree {self.homological_degree}"
        )

    def __neg__(self):
        """
        Computes the negation of a persistence landscape object.

        Usage
        -----
        (-P).critical_pairs returns the negation
        """
        self.compute_landscape()
        return PersLandscapeExact(homological_degree=self.homological_degree,
                                  critical_pairs=[[[a, -b] for a, b in depth_list]
                                                  for depth_list in self.critical_pairs])

    def __add__(self, other):
        """
        Computes the sum of two persistence landscape objects.

        Parameters
        ----------
        other : PersLandscapeExact

        Usage
        -----
        (P+Q).critical_pairs returns the sum
        """
        # This requires a list implementation as written.
        if self.homological_degree != other.homological_degree:
            raise ValueError("homological degrees must match")
        return PersLandscapeExact(
            critical_pairs=union_crit_pairs(self, other),
            homological_degree=self.homological_degree
        )

    def __sub__(self, other):
        """
        Computes the difference of two persistence landscape objects.

        Parameters
        ----------
        other : PersLandscapeExact

        Usage
        -----
        (P-Q).critical_pairs returns the difference
        """
        return self + -other

    def __mul__(self, other: float):
        """
        Computes the product of a persistence landscape object and a float.

        Parameters
        ----------
        other : float
            the number the persistence landscape will be multiplied by

        Usage
        -----
        (3*P).critical_pairs returns the product
        """
        self.compute_landscape()
        return PersLandscapeExact(
            homological_degree=self.homological_degree,
            critical_pairs=[[(a, other * b) for a, b in depth_list]
                            for depth_list in self.critical_pairs])

    def __rmul__(self, other: float):
        """
        Computes the product of a persistence landscape object and a float.

        Parameters
        ----------
        other : float
            the number the persistence landscape will be multiplied by

        Usage
        -----
        (P*3).critical_pairs returns the product
        """
        return self.__mul__(other)

    def __truediv__(self, other: float):
        """
        Computes the quotient of a persistence landscape object and a float.

        Parameters
        ----------
        other : float
            the divisor of the persistence landscape object

        Usage
        -----
        (P/3).critical_pairs returns the quotient
        """
        if other == 0.:
            raise ValueError("Cannot divide by zero")
        return self * (1.0 / other)

    # Indexing, slicing
    def __getitem__(self, key: slice) -> list:
        """
        Returns a list of critical pairs corresponding to the depths
        specified by `key`.

        Parameters
        ----------
        key : slice object

        Returns
        -------
        list
            The critical pairs of the landscape functions at the depths
            given by `key`.
        """
        self.compute_landscape()
        return self.critical_pairs[key]

    def compute_landscape(self, verbose: bool = False) -> list:
        """
        Computes the persistence landscape and stores it in
        `self.critical_pairs` as a list.

        Parameters
        ----------
        verbose : bool
            If true, progress messages are printed during computation.

        Returns
        -------
        None.
        """

        verboseprint = print if verbose else lambda *a, **k: None

        # check if landscapes were already computed
        if self.critical_pairs:
            verboseprint(
                'self.critical_pairs was not empty and stored value was returned')
            return self.critical_pairs

        A = self.dgms
        # change A into a list
        A = list(A)
        # change inner nparrays into lists
        for i in range(len(A)):
            A[i] = list(A[i])
        # store infinity values
        infty_bar = False
        if A[-1][1] == np.inf:
            A.pop(-1)
            infty_bar = True
            # TODO: Do we need this infty_bar variable?

        landscape_idx = 0
        L = []

        # Sort A: read from right to left inside ()
        A = sorted(A, key=lambda x: [x[0], -x[1]])

        while A:
            verboseprint(f'computing landscape index {landscape_idx+1}...')

            # add a 0 element to begin count of lambda_k
            # size_landscapes = np.append(size_landscapes, [0])

            # pop first term
            b, d = A.pop(0)
            verboseprint(f'(b,d) is ({b},{d})')

            # outer brackets for start of L_k
            L.append([[-np.inf, 0], [b, 0], [(b + d) / 2, (d - b) / 2]])

            # check for duplicates of (b,d)
            duplicate = 0

            for j, itemj in enumerate(A):
                if itemj == [b, d]:
                    duplicate += 1
                    A.pop(j)
                else:
                    break

            while L[landscape_idx][-1] != [np.inf, 0]:

                # if d >= all remaining death values, then end lambda;
                # includes the edge case of (b,d) pairs with the same death time
                if all(d >= _[1] for _ in A):
                    # add to end of L_k
                    L[landscape_idx].extend([[d, 0], [np.inf, 0]])
                    # for duplicates, add another copy of the last computed
                    # lambda
                    for _ in range(duplicate):
                        L.append(L[-1])
                        landscape_idx += 1

                else:
                    # set (b', d') to be the first term so that d' > d
                    for i, item in enumerate(A):
                        if item[1] > d:
                            b_prime, d_prime = A.pop(i)
                            verboseprint(f'(bp,dp) is ({b_prime},{d_prime})')
                            break

                    # Case I
                    if b_prime > d:
                        L[landscape_idx].extend([[d, 0]])

                    # Case II
                    if b_prime >= d:
                        L[landscape_idx].extend([[b_prime, 0]])

                    # Case III
                    else:
                        L[landscape_idx].extend(
                            [[(b_prime + d) / 2, (d - b_prime) / 2]])
                        # push (b', d) into A in order:
                        # find the first b_i in A so that b' <= b_i

                        # push (b', d) to the end of the list if b' not <= any b_i
                        ind = len(A)
                        for i in range(len(A)):
                            if b_prime <= A[i][0]:
                                ind = i  # index to push (b', d) if b' != b_i
                                break
                        # if b' not <= any b_i, put it at the end of the list
                        if ind == len(A):
                            pass
                        # if b' = b_i
                        elif b_prime == A[ind][0]:
                            # pick out (bk,dk) such that b' = bk
                            A_i = [item for item in A if item[0] == b_prime]

                            # move index to the right one for every d_i such
                            # that d < d_i
                            for j in range(len(A_i)):
                                if d < A_i[j][1]:
                                    ind += 1

                                # d > dk for all k

                        A.insert(ind, [b_prime, d])

                    L[landscape_idx].extend(
                        [[(b_prime + d_prime) / 2, (d_prime - b_prime) / 2]])
                    # size_landscapes[landscape_idx] += 1

                    b, d = b_prime, d_prime  # set (b,d) = (b',d')

            landscape_idx += 1

        verboseprint(
            'self.critical_pairs was empty and the algorithm was executed')
        # gets rid of infinity terms
        # As written, this function shouldn't return anything, but rather
        # update self.critical_pairs.
        self.max_depth = len(L)
        self.critical_pairs = [item[1:-1] for item in L]

    def compute_landscape_by_depth(self, depth: int) -> list:
        """
        Returns the landscape function at the given depth from
        `self.critical_pairs` as a list.

        Parameters
        ----------
        depth : int
            the depth of the desired landscape function
        """

        if self.critical_pairs:
            return self.critical_pairs[depth]
        else:
            return self.compute_landscape()[depth]

    def p_norm(self, p: int = 2) -> float:
        """
        Returns the L_{`p`} norm of `self.critical_pairs`.

        Parameters
        ----------
        p : float, default 2
            value p of the L_{`p`} norm
        """
        if p == -1:
            return self.infinity_norm()
        if p < -1 or -1 < p < 0:
            raise ValueError(f"p can't be negative, but {p} was passed")
        self.compute_landscape()
        result = 0.
        for l in self.critical_pairs:
            for [[x0, y0], [x1, y1]] in zip(l, l[1:]):
                if y0 == y1:
                    # horizontal line segment
                    result += (np.abs(y0)**p) * (x1 - x0)
                    continue
                # slope is well-defined
                slope = (y1 - y0) / (x1 - x0)
                b = y0 - slope * x0
                # segment crosses the x-axis
                if (y0 < 0 and y1 > 0) or (y0 > 0 and y1 < 0):
                    z = -b / slope
                    ev_x1 = (slope * x1 + b)**(p + 1) / (slope * (p + 1))
                    ev_x0 = (slope * x0 + b)**(p + 1) / (slope * (p + 1))
                    ev_z = (slope * z + b)**(p + 1) / (slope * (p + 1))
                    result += np.abs(ev_x1 + ev_x0 - 2 * ev_z)
                # segment does not cross the x-axis
                else:
                    ev_x1 = (slope * x1 + b)**(p + 1) / (slope * (p + 1))
                    ev_x0 = (slope * x0 + b)**(p + 1) / (slope * (p + 1))
                    result += np.abs(ev_x1 - ev_x0)
        return (result)**(1.0 / p)

    def sup_norm(self) -> float:
        """
        Returns the sup norm of `self.critical_pairs`.
        """

        self.compute_landscape()
        cvals = list(itertools.chain.from_iterable(self.critical_pairs))
        return max(np.abs(cvals), key=itemgetter(1))[1]


###############################
# End PLE class definition
###############################
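# Usage sketch (illustrative only; not exercised by the module itself). It
# assumes ripser.py is installed and that `data` is a hypothetical (n, 2)
# numpy array of points; everything else uses only the API defined above.
#
#   from ripser import ripser
#   dgms = ripser(data)['dgms']
#   P = PersLandscapeExact(dgms=dgms, homological_degree=1, compute=True)
#   Q = 2 * P - P                  # landscapes support vector-space arithmetic
#   Q.p_norm(p=2), Q.sup_norm()    # norms of the resulting landscape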

def vectorize(l: PersLandscapeExact, start: float = None,
              stop: float = None, num_dims: int = 500) -> PersLandscapeGrid:
    """Converts a `PersLandscapeExact` type to a `PersLandscapeGrid` type.

    Parameters
    ----------
    start : float, default None
        start value of the grid; if start is not given, it is assigned the
        minimum critical value of the landscape

    stop : float, default None
        stop value of the grid; if stop is not given, it is assigned the
        maximum critical value of the landscape

    num_dims : int, default 500
        number of points starting from `start` and ending at `stop`
    """

    l.compute_landscape()
    # flatten the critical pairs across depths to find the extreme x-values
    crit_pairs = list(itertools.chain.from_iterable(l.critical_pairs))
    if start is None:
        start = min(crit_pairs, key=itemgetter(0))[0]
    if stop is None:
        stop = max(crit_pairs, key=itemgetter(0))[0]
    grid = np.linspace(start, stop, num_dims)
    result = []
    # creates sequential pairs of points for each lambda in critical_pairs
    for depth in l.critical_pairs:
        xs, ys = zip(*depth)
        result.append(np.interp(grid, xs, ys))
    return PersLandscapeGrid(start=start, stop=stop, num_dims=num_dims,
                             homological_degree=l.homological_degree, values=np.array(result))
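# Usage sketch (hedged): converting the exact landscape `P` from the example
# above to its grid approximation. `P` is hypothetical; only `vectorize` and
# the `num_dims` keyword defined above are used.
#
#   P_grid = vectorize(P, num_dims=1000)
#   P_grid.values.shape            # roughly (P.max_depth, 1000)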

persim/PersLandscapeGrid.py

@@ -0,0 +1,358 @@
"""
Define Grid Persistence Landscape class.
"""
import numpy as np
import itertools
from .landscape_auxiliary import pairs_snap, union_vals, ndsnap_regular
from operator import itemgetter, attrgetter
from .PersLandscape import PersLandscape


class PersLandscapeGrid(PersLandscape):
    """
    Persistence Landscape Grid class.

    This class implements an approximate version of persistence landscapes,
    given by sampling the landscape functions on a grid. This version is only
    an approximation of the true landscape, but given a fine enough grid it
    should suffice for most applications. If an exact calculation with no
    approximation is desired, consider `PersLandscapeExact`.

    The default parameters for start and stop favor dgms over values. That
    is, if both dgms and values are passed but start and stop are not, the
    start and stop values will be determined by dgms.

    Parameters
    ----------
    start : float, optional
        The start parameter of the approximating grid.

    stop : float, optional
        The stop parameter of the approximating grid.

    num_dims : int, optional
        The number of dimensions of the approximation, equivalently the
        number of steps in the grid.

    dgms : list[list]
        A list of lists of birth-death pairs for each homological degree.

    homological_degree : int
        The homological degree of the persistence diagram.

    values : numpy array, optional
        The sampled values of the landscape functions on the grid; an
        alternative to passing `dgms`.

    compute : bool, optional
        A flag determining whether the landscape functions are computed upon
        instantiation.

    Methods
    -------
    compute_landscape : computes the sampled landscape values and stores them
        in the attribute `values`

    values_to_pairs : converts the sampled values to (grid value, function
        value) pairs

    p_norm : returns the p-norm of a landscape

    sup_norm : returns the sup norm of a landscape
    """

    def __init__(
            self, start: float = None, stop: float = None, num_dims: int = 500,
            dgms: list = [], homological_degree: int = 0,
            values=np.array([]), compute: bool = False) -> None:

        super().__init__(dgms=dgms, homological_degree=homological_degree)
        if dgms:  # diagrams are passed
            self.dgms = dgms[self.homological_degree]
            # remove infinity values
            # ~: indexes everything but values satisfying the condition
            # axis = 1: checks the condition for each row
            # np.any: if any element in the row satisfies the condition
            # it gets indexed
            self.dgms = self.dgms[~np.any(self.dgms == np.inf, axis=1)]
            # calculate start and stop
            if start is None:
                start = min(self.dgms, key=itemgetter(0))[0]
            if stop is None:
                stop = max(self.dgms, key=itemgetter(1))[1]
        elif values.size > 0:  # values passed, diagrams weren't
            self.dgms = dgms
            if start is None:
                raise ValueError(
                    'start parameter must be passed if values are passed.')
            if stop is None:
                raise ValueError(
                    'stop parameter must be passed if values are passed.')
                # stop = np.amax(values)
        self.start = start
        self.stop = stop
        self.values = values
        self.num_dims = num_dims
        if compute:
            self.compute_landscape()

    def __repr__(self) -> str:

        return ('The persistence landscapes of diagrams in homological '
                f'degree {self.homological_degree} on grid from {self.start} to {self.stop}'
                f' in {self.num_dims} steps')

    def compute_landscape(self, verbose: bool = False) -> list:
        """
        Computes the sampled landscape values and stores them in `self.values`.
        """

        verboseprint = print if verbose else lambda *a, **k: None

        if self.values.size:
            verboseprint('values was stored, exiting')
            return

        verboseprint('values was empty, computing values')
        # make grid
        grid_values, step = np.linspace(self.start, self.stop, self.num_dims,
                                        retstep=True)
        # grid_values = list(grid_values)
        # grid = np.array([[x,y] for x in grid_values for y in grid_values])
        bd_pairs = self.dgms

        # create a list of triangle tops for each birth-death pair
        birth: 'np.ndarray' = bd_pairs[:, 0]
        death: 'np.ndarray' = bd_pairs[:, 1]
        triangle_top_ycoord = (death - birth) / 2
        triangle_top = np.array(
            list(zip((birth + death) / 2, (death - birth) / 2)))

        # snap birth-death pairs and triangle tops to the grid
        # bd_pairs_grid = pairs_snap(bd_pairs, grid)
        bd_pairs_grid = ndsnap_regular(bd_pairs, *(grid_values, grid_values))
        # triangle_top_grid = pairs_snap(triangle_top, grid)
        triangle_top_grid = ndsnap_regular(
            triangle_top, *(grid_values, grid_values))

        # make grid dictionary
        index = list(range(self.num_dims))
        dict_grid = dict(zip(grid_values, index))

        # initialize W to a list of 2m + 1 empty lists
        W = [[] for _ in range(self.num_dims)]

        # for each birth-death pair
        for ind_in_bd_pairs, bd in enumerate(bd_pairs_grid):
            [b, d] = bd
            ind_in_Wb = dict_grid[b]  # index in W
            ind_in_Wd = dict_grid[d]  # index in W

            # step through by x value
            j = 0
            # j in (b, b+d/2]
            for _ in np.arange(
                    triangle_top_grid[ind_in_bd_pairs, 0], b, -step):
                j += 1
                # j*step: adding points from a line with slope 1
                W[ind_in_Wb + j].append(j * step)

            j = 0
            # j in (b+d/2, d)
            for _ in np.arange(
                    triangle_top_grid[ind_in_bd_pairs, 0] + step, d, step):
                j += 1
                W[ind_in_Wd - j].append(j * step)

        # sort each list in W
        for i in range(len(W)):
            W[i] = sorted(W[i], reverse=True)

        # calculate K: max length of lists in W
        K = max([len(_) for _ in W])

        # initialize L to be a zeros matrix of size K x (2m+1)
        L = np.array([np.zeros(self.num_dims) for _ in range(K)])

        # copy values from W into L
        for i in range(self.num_dims):
            for k in range(len(W[i])):
                L[k][i] = W[i][k]

        # check if L is empty
        if not L.size:
            L = np.array(['empty'])
            print('Bad choice of grid, values is empty')

        self.values = L
        return

    def values_to_pairs(self):
        """
        Converts function values to ordered pairs and returns them.

        Returns
        -------
        numpy array
            An array of (grid value, function value) pairs for each depth.
        """
        self.compute_landscape()
        grid_values = list(np.linspace(self.start, self.stop, self.num_dims))
        result = []
        for l in self.values:
            pairs = list(zip(grid_values, l))
            result.append(pairs)
        return np.array(result)

    def __add__(self, other):
        super().__add__(other)
        if self.start != other.start:
            raise ValueError("Start values of grids do not coincide")
        if self.stop != other.stop:
            raise ValueError("Stop values of grids do not coincide")
        if self.num_dims != other.num_dims:
            raise ValueError("Number of steps of grids do not coincide")
        self_pad, other_pad = union_vals(self.values, other.values)
        return PersLandscapeGrid(start=self.start, stop=self.stop,
                                 num_dims=self.num_dims,
                                 homological_degree=self.homological_degree,
                                 values=self_pad + other_pad)

    def __neg__(self):
        return PersLandscapeGrid(
            start=self.start,
            stop=self.stop,
            num_dims=self.num_dims,
            homological_degree=self.homological_degree,
            values=np.array([-1 * depth_array for depth_array in self.values]))

    def __sub__(self, other):
        return self + -other

    def __mul__(self, other: float):
        super().__mul__(other)
        return PersLandscapeGrid(
            start=self.start,
            stop=self.stop,
            num_dims=self.num_dims,
            homological_degree=self.homological_degree,
            values=np.array([other * depth_array for depth_array in self.values]))

    def __rmul__(self, other: float):
        return self.__mul__(other)

    def __truediv__(self, other: float):
        super().__truediv__(other)
        return (1.0 / other) * self

    def __getitem__(self, key: slice) -> list:
        """
        Returns a list of values corresponding to the depths specified by
        `key`.

        Parameters
        ----------
        key : slice object

        Returns
        -------
        list
            The values of the landscape functions at the depths given by
            `key`.
        """
        self.compute_landscape()
        return self.values[key]

    def p_norm(self, p: int = 2) -> float:
        return np.sum([np.linalg.norm(depth, p) for depth in self.values])

    def sup_norm(self) -> float:
        return np.max(np.abs(self.values))

###############################
# End PLG class definition
###############################
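# Usage sketch (illustrative only). `dgms` is assumed to be the output of
# ripser.py, e.g. ripser(data)['dgms'] for a hypothetical point cloud `data`;
# the constructor arguments are exactly those defined above.
#
#   L = PersLandscapeGrid(dgms=dgms, homological_degree=1,
#                         num_dims=500, compute=True)
#   L.values                       # one row of sampled values per depth
#   (2 * L - L).sup_norm()         # grid landscapes support the same arithmetic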

def snap_PL(l: list, start: float = None, stop: float = None,
            num_dims: int = None) -> list:
    """
    Given a list `l` of PersLandscapeGrid types, convert them to a list
    where each entry has the same start, stop, and num_dims. This puts each
    entry of `l` on the same grid, so they can be added, averaged, etc.

    This assumes they're all of the same homological degree.
    """
    if start is None:
        start = min(l, key=attrgetter('start')).start
    if stop is None:
        stop = max(l, key=attrgetter('stop')).stop
    if num_dims is None:
        num_dims = max(l, key=attrgetter('num_dims')).num_dims
    grid = np.linspace(start, stop, num_dims)
    k = []
    for pl in l:
        snapped_landscape = []
        for funct in pl:
            # snap each function onto the common grid and store it
            snapped_landscape.append(np.array(np.interp(grid, np.linspace(pl.start, pl.stop,
                                                                          pl.num_dims), funct)))
        # store the snapped persistence landscape
        k.append(PersLandscapeGrid(start=start, stop=stop, num_dims=num_dims,
                                   values=np.array(snapped_landscape),
                                   homological_degree=pl.homological_degree))
    return k
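# Usage sketch (hedged): `L1` and `L2` are hypothetical PersLandscapeGrid
# instances that may live on different grids; snapping puts them on a common
# grid so the arithmetic defined above applies.
#
#   L1_s, L2_s = snap_PL([L1, L2])
#   total = L1_s + L2_s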

def lc_grid(landscapes: list, coeffs: list, start: float = None, stop: float = None,
            num_dims: int = None) -> PersLandscapeGrid:
    """Compute the linear combination of a list of PersLandscapeGrid objects.

    Parameters
    ----------
    landscapes : list
        a list of PersLandscapeGrid objects

    coeffs : list
        a list of the coefficients defining the linear combination

    start : float
        starting value of the common grid for the PersLandscapeGrid objects
        in `landscapes`

    stop : float
        last value of the common grid for the PersLandscapeGrid objects
        in `landscapes`

    num_dims : int
        number of steps on the common grid for the PersLandscapeGrid objects
        in `landscapes`

    Returns
    -------
    PersLandscapeGrid:
        The specified linear combination of the PersLandscapeGrid objects
        in `landscapes`
    """
    l = snap_PL(landscapes, start=start, stop=stop, num_dims=num_dims)
    return np.sum(np.array(coeffs) * np.array(l))

def average_grid(landscapes: list, start: float = None, stop: float = None,
                 num_dims: int = None) -> PersLandscapeGrid:
    """Compute the average of a list of PersLandscapeGrid objects.

    Parameters
    ----------
    landscapes : list
        a list of PersLandscapeGrid objects

    start : float
        starting value of the common grid for the PersLandscapeGrid objects
        in `landscapes`

    stop : float
        last value of the common grid for the PersLandscapeGrid objects
        in `landscapes`

    num_dims : int
        number of steps on the common grid for the PersLandscapeGrid objects
        in `landscapes`

    Returns
    -------
    PersLandscapeGrid:
        The specified average of the PersLandscapeGrid objects in `landscapes`
    """
    return lc_grid(landscapes=landscapes, coeffs=[1.0 / len(landscapes) for _ in landscapes],
                   start=start, stop=stop, num_dims=num_dims)
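# Usage sketch (hedged): `group_a` and `group_b` are hypothetical lists of
# PersLandscapeGrid objects of the same homological degree.
#
#   mean_a = average_grid(group_a)
#   mean_b = average_grid(group_b)
#   signed_diff = lc_grid([mean_a, mean_b], coeffs=[1, -1])
#   signed_diff.sup_norm()         # size of the difference between the means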

persim/_version.py

@@ -1 +1 @@
-__version__ = "0.1.4"
+__version__ = "0.1.5"

persim/landscape_visualization.py

@@ -0,0 +1,212 @@
"""
Visualization methods for plotting persistence landscapes.
"""

import itertools
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
from .PersLandscape import PersLandscape
from .PersLandscapeExact import PersLandscapeExact
from .PersLandscapeGrid import PersLandscapeGrid
from operator import itemgetter
from matplotlib import cm

# TODO: Use styles instead of colormaps directly? Or both?

mpl.rcParams['text.usetex'] = True


def plot_landscape(landscape: PersLandscape,
                   num_steps: int = 3000,
                   colormap="default",
                   title=None,
                   labels=None,
                   ):
    """
    Plot the landscape functions of `landscape`, dispatching on its type.
    """
    if isinstance(landscape, PersLandscapeGrid):
        return plot_landscape_grid(landscape=landscape, num_steps=num_steps,
                                   colormap=colormap, title=title, labels=labels)
    if isinstance(landscape, PersLandscapeExact):
        return plot_landscape_exact(landscape=landscape, num_steps=num_steps,
                                    colormap=colormap, title=title, labels=labels)
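# Usage sketch (hedged): `P` is a hypothetical PersLandscapeExact or
# PersLandscapeGrid instance computed elsewhere; the dispatcher above routes
# it to the matching plotting routine.
#
#   plot_landscape(P, title="degree-1 landscape")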

def plot_landscape_exact(landscape: PersLandscapeExact,
                         num_steps: int = 3000,
                         colormap="default",
                         alpha=0.8,
                         labels=None,
                         padding: float = 0.1,
                         depth_padding: float = 0.7,
                         title=None):
    """
    A plot of the persistence landscape.

    Warning: This function is quite slow, especially for large landscapes.

    Parameters
    ----------
    num_steps : int, default 3000
        number of sampled points that are plotted

    colormap, default "default"
        name of the matplotlib style used for shading of the landscape functions

    alpha, default 0.8
        transparency of shading

    padding : float, default 0.1
        amount of empty grid shown to the left and right of the landscape functions

    depth_padding : float, default 0.7
        amount of space between sequential landscape functions
    """
    fig = plt.figure()
    plt.style.use(colormap)
    ax = fig.gca(projection='3d')
    landscape.compute_landscape()
    # itemgetter index selects which entry to take max/min wrt.
    # the hanging [0] or [1] takes that entry.
    crit_pairs = list(itertools.chain.from_iterable(landscape.critical_pairs))
    min_crit_pt = min(crit_pairs, key=itemgetter(0))[0]  # smallest birth time
    max_crit_pt = max(crit_pairs, key=itemgetter(0))[0]  # largest death time
    max_crit_val = max(crit_pairs, key=itemgetter(1))[1]  # largest peak of landscape
    min_crit_val = min(crit_pairs, key=itemgetter(1))[1]  # smallest peak of landscape
    norm = mpl.colors.Normalize(vmin=min_crit_val, vmax=max_crit_val)
    scalarMap = mpl.cm.ScalarMappable(norm=norm)
    # x-axis for grid
    domain = np.linspace(min_crit_pt - padding * 0.1, max_crit_pt + padding * 0.1, num=num_steps)
    # for each landscape function
    for depth, l in enumerate(landscape):
        # sequential pairs in landscape
        xs, zs = zip(*l)
        image = np.interp(domain, xs, zs)
        for x, z in zip(domain, image):
            if z == 0.:
                # plot a single point here?
                continue  # move to the next point in the loop
            if z > 0.:
                ztuple = [0, z]
            elif z < 0.:
                ztuple = [z, 0]
            # for coloring, see https://matplotlib.org/3.1.0/tutorials/colors/colormapnorms.html
            ax.plot(
                [x, x],  # plotting a vertical line to get a shaded function
                [depth_padding * depth, depth_padding * depth],
                ztuple,
                linewidth=0.5,
                alpha=alpha,
                # c=colormap(norm(z)))
                c=scalarMap.to_rgba(z))
            ax.plot([x], [depth_padding * depth], [z], 'k.', markersize=0.1)

    # ax.set_xlabel('X')
    ax.set_ylabel('depth')
    # ax.set_zlabel('Z')
    # ax.set_xlim(max_crit_pt+padding, min_crit_pt-padding)  # reversed
    # ax.set_ylim(0, depth_padding*landscape.max_depth+1)
    # ax.set_zlim(0, max_crit_val+padding)
    # for line in ax.xaxis.get_ticklines():
    #     line.set_visible(False)
    # for line in ax.yaxis.get_ticklines():
    #     line.set_visible(False)
    # for line in ax.zaxis.get_ticklines():
    #     line.set_visible(False)
    # ax.set_xticklabels(np.arange(min_crit_pt, max_crit_pt, 0.2))
    # ax.set_yticklabels(np.arange(0, landscape.max_depth, 3))
    # plt.axis(False)
    if title:
        plt.title(title)
    ax.view_init(10, 90)
    plt.show()

def plot_landscape_grid(landscape: PersLandscapeGrid,
                        num_steps: int = 3000,
                        colormap="default",
                        labels=None,
                        alpha=0.8,
                        padding: float = 0.1,
                        depth_padding: float = 0.7,
                        title=None):
    """
    A plot of the persistence landscape.

    Warning: This function is quite slow, especially for large landscapes.

    Parameters
    ----------
    num_steps : int, default 3000
        number of sampled points that are plotted

    colormap, default "default"
        name of the matplotlib style used for shading of the landscape functions

    alpha, default 0.8
        transparency of shading

    padding : float, default 0.1
        amount of empty grid shown to the left and right of the landscape functions

    depth_padding : float, default 0.7
        amount of space between sequential landscape functions
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    plt.style.use(colormap)
    landscape.compute_landscape()
    # TODO: RE the following line: is this better than np.concatenate?
    #       There is probably an even better way without creating an intermediary.
    _vals = list(itertools.chain.from_iterable(landscape.values))
    min_val = min(_vals)
    max_val = max(_vals)
    norm = mpl.colors.Normalize(vmin=min_val, vmax=max_val)
    scalarMap = mpl.cm.ScalarMappable(norm=norm)
    # x-axis for grid
    domain = np.linspace(landscape.start - padding * 0.1, landscape.stop + padding * 0.1, num=num_steps)
    # for each landscape function
    for depth, l in enumerate(landscape):
        # sequential pairs in landscape
        # xs, zs = zip(*l)
        image = np.interp(domain, np.linspace(start=landscape.start, stop=landscape.stop,
                                              num=landscape.num_dims), l)
        for x, z in zip(domain, image):
            if z == 0.:
                # plot a single point here?
                continue  # move to the next point in the loop
            if z > 0.:
                ztuple = [0, z]
            elif z < 0.:
                ztuple = [z, 0]
            # for coloring, see https://matplotlib.org/3.1.0/tutorials/colors/colormapnorms.html
            ax.plot(
                [x, x],  # plotting a vertical line to get a shaded function
                [depth_padding * depth, depth_padding * depth],
                ztuple,
                linewidth=0.5,
                alpha=alpha,
                # c=colormap(norm(z)))
                c=scalarMap.to_rgba(z))
            ax.plot([x], [depth_padding * depth], [z], 'k.', markersize=0.1)

    # ax.set_xlabel('X')
    ax.set_ylabel('depth')
    # ax.set_zlabel('Z')
    # ax.set_xlim(max_crit_pt+padding, min_crit_pt-padding)  # reversed
    # ax.set_ylim(0, depth_padding*landscape.max_depth+1)
    # ax.set_zlim(0, max_crit_val+padding)
    # for line in ax.xaxis.get_ticklines():
    #     line.set_visible(False)
    # for line in ax.yaxis.get_ticklines():
    #     line.set_visible(False)
    # for line in ax.zaxis.get_ticklines():
    #     line.set_visible(False)
    # ax.set_xticklabels(np.arange(min_crit_pt, max_crit_pt, 0.2))
    # ax.set_yticklabels(np.arange(0, landscape.max_depth, 3))
    # plt.axis(False)
    if title:
        plt.title(title)
    ax.view_init(10, 90)
    plt.show()

persim/__init__.py

@@ -6,4 +6,12 @@
 from .gromov_hausdorff import *
 from .visuals import *

+# from .landscape import *
+from .PersLandscapeExact import *
+from .PersLandscapeGrid import *
+
+from .landscape_visualization import *
+from .landscape_visualization_simple import *
+
+
 from ._version import __version__
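With these re-exports, the new landscape classes and plotting helpers become available from the top-level persim namespace. A hedged sketch (it assumes none of the new modules restricts its exports via __all__; `dgms` is a hypothetical ripser output as in the examples above):

    import persim
    P = persim.PersLandscapeExact(dgms=dgms, homological_degree=1, compute=True)
    persim.plot_landscape(P)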

Showing 8 files with coverage changes:

New file: persim/landscape_visualization.py
New file: persim/PersLandscapeGrid.py
New file: persim/PersLandscape.py
New file: persim/landscape_auxiliary.py
New file: persim/landscape.py
New file: persim/PersLandscapeExact.py
New file: persim/pl_transformer.py
New file: persim/landscape_visualization_simple.py

Files coverage: persim 60.56% (-20.39%)
Project totals (18 files): 60.56%