# Licensed under a 3-clause BSD style license - see LICENSE.rst
import json
import re
import os

from astroquery.splatalogue.build_species_table import data_path, get_json_species_ids


class SpeciesLookuptable(dict):

    def find(self, s, flags=0, return_dict=True):
        """
        Search dictionary keys for a regex match to string s

        Parameters
        ----------
        s : str
            String to compile as a regular expression
        flags : int
            re (regular expression) flags
        return_dict : bool
            If True, return a dictionary; if False, return only the
            matching values

        Returns
        -------
        Subset of parent dictionary if return_dict, else list of values
        corresponding to matches
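
        Examples
        --------
        A minimal sketch; the species pattern here is illustrative, and
        the table itself is built by ``species_lookuptable`` below:

        >>> import re
        >>> table = species_lookuptable()  # doctest: +SKIP
        >>> table.find('H2CO', flags=re.IGNORECASE)  # doctest: +SKIP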
        """

        R = re.compile(s, flags)

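        # keep only the entries whose key matches the compiled pattern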
        out = SpeciesLookuptable({k: v for k, v in self.items()
                                  if R.search(k)})

        if return_dict:
            return out
        else:
            # return a list, as documented, rather than a dict view
            return list(out.values())


def species_lookuptable(filename='splat-species.json', recache=False):
    """
    Format the species ID results scraped from Splatalogue into a
    ``SpeciesLookuptable`` object.

    First check whether a cached result exists; if not, run the
    scraping routine and use its result. Otherwise, load and use the
    cached result.

    The ``recache`` flag can be used to force a refresh of the local
    cache.

    Parameters
    ----------
    filename : str, optional
        Name of the file cache, by default 'splat-species.json'
    recache : bool, optional
        If True, force refreshing of the JSON cache, by default False

    Returns
    -------
    lookuptable : SpeciesLookuptable
        ``SpeciesLookuptable`` object
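
    Examples
    --------
    A minimal sketch; the first call may scrape Splatalogue when no
    local cache exists, and the species pattern is illustrative:

    >>> table = species_lookuptable()  # doctest: +SKIP
    >>> table.find('CH3OH')  # doctest: +SKIP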
    """
    file_cache = data_path(filename)
    # scrape Splatalogue if recache was requested or no cached file
    # exists yet
    if recache or not os.path.isfile(file_cache):
        J = get_json_species_ids(filename)
    else:
        with open(file_cache, 'r') as f:
            J = json.load(f)
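    # flatten the nested JSON and swap keys and values so the lookup
    # table is keyed by the strings that ``find`` searches against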
    lookuptable = SpeciesLookuptable({v: k for d in J.values()
                                      for k, v in d.items()})

    return lookuptable
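

# Minimal usage sketch: building the table may scrape Splatalogue over the
# network when no local cache exists; the species pattern below is purely
# illustrative.
if __name__ == "__main__":
    table = species_lookuptable()
    matches = table.find('H2CO', flags=re.IGNORECASE)
    print(matches)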
