#1814 Download epic metadata

Open lvalerom
Coverage Reach
utils/tap/core.py utils/tap/xmlparser/tableSaxParser.py utils/tap/xmlparser/jobSaxParser.py utils/tap/xmlparser/groupSaxParser.py utils/tap/xmlparser/sharedItemsSaxParser.py utils/tap/xmlparser/jobListSaxParser.py utils/tap/xmlparser/utils.py utils/tap/xmlparser/__init__.py utils/tap/model/job.py utils/tap/model/filter.py utils/tap/model/modelutils.py utils/tap/model/group.py utils/tap/model/tapcolumn.py utils/tap/model/taptable.py utils/tap/model/shared_item.py utils/tap/model/shared_to_item.py utils/tap/model/user.py utils/tap/model/__init__.py utils/tap/conn/tapconn.py utils/tap/conn/__init__.py utils/tap/gui/login.py utils/tap/gui/__init__.py utils/tap/taputils.py utils/tap/__init__.py utils/commons.py utils/schema.py utils/decorators.py utils/timer.py utils/download_file_list.py utils/progressbar.py utils/process_asyncs.py utils/docstr_chompers.py utils/mocks.py utils/system_tools.py utils/class_or_instance.py utils/url_helpers.py utils/testing_tools.py utils/__init__.py mast/observations.py mast/discovery_portal.py mast/collections.py mast/services.py mast/cutouts.py mast/cloud.py mast/auth.py mast/core.py mast/utils.py mast/__init__.py alma/core.py alma/tapsql.py alma/utils.py alma/__init__.py vo_conesearch/validator/validate.py vo_conesearch/validator/inspect.py vo_conesearch/validator/tstquery.py vo_conesearch/validator/__init__.py vo_conesearch/validator/exceptions.py vo_conesearch/vos_catalog.py vo_conesearch/core.py vo_conesearch/conesearch.py vo_conesearch/vo_async.py vo_conesearch/__init__.py vo_conesearch/exceptions.py cosmosim/core.py cosmosim/__init__.py eso/core.py eso/__init__.py simbad/core.py simbad/get_votable_fields.py simbad/__init__.py irsa_dust/core.py irsa_dust/utils.py irsa_dust/__init__.py mpc/core.py mpc/__init__.py esasky/core.py esasky/__init__.py vizier/core.py vizier/__init__.py sdss/core.py sdss/field_names.py sdss/__init__.py wfau/core.py wfau/__init__.py esa/hubble/core.py esa/hubble/__init__.py esa/xmm_newton/core.py 
esa/xmm_newton/__init__.py jplhorizons/core.py jplhorizons/__init__.py cadc/core.py cadc/__init__.py splatalogue/core.py splatalogue/utils.py splatalogue/build_species_table.py splatalogue/load_species_table.py splatalogue/__init__.py splatalogue/templates.py splatalogue/slap.py lamda/core.py lamda/utils.py lamda/__init__.py ned/core.py ned/__init__.py query.py besancon/core.py besancon/__init__.py astrometry_net/core.py astrometry_net/__init__.py open_exoplanet_catalogue/utils.py open_exoplanet_catalogue/oec_query.py open_exoplanet_catalogue/__init__.py nasa_exoplanet_archive/core.py nasa_exoplanet_archive/__init__.py nrao/core.py nrao/__init__.py imcce/core.py imcce/__init__.py atomic/core.py atomic/__init__.py atomic/utils.py jplsbdb/core.py jplsbdb/__init__.py casda/core.py casda/__init__.py gemini/core.py gemini/urlhelper.py gemini/__init__.py cds/core.py cds/__init__.py irsa/core.py irsa/__init__.py oac/core.py oac/__init__.py lcogt/core.py lcogt/__init__.py gaia/core.py gaia/__init__.py heasarc/core.py heasarc/__init__.py ibe/core.py ibe/__init__.py nist/core.py nist/__init__.py hitran/core.py hitran/utils.py hitran/__init__.py skyview/core.py skyview/__init__.py nasa_ads/core.py nasa_ads/__init__.py nasa_ads/utils.py vamdc/core.py vamdc/load_species_table.py vamdc/__init__.py ogle/core.py ogle/__init__.py xmatch/core.py xmatch/__init__.py jplspec/core.py jplspec/lookup_table.py jplspec/__init__.py alfalfa/core.py alfalfa/__init__.py fermi/core.py fermi/__init__.py sha/core.py sha/__init__.py template_module/core.py template_module/__init__.py nvas/core.py nvas/__init__.py magpis/core.py magpis/__init__.py exoplanet_orbit_database/exoplanet_orbit_database.py exoplanet_orbit_database/__init__.py dace/core.py dace/__init__.py image_cutouts/first/core.py image_cutouts/first/__init__.py noirlab/core.py noirlab/__init__.py svo_fps/core.py svo_fps/__init__.py gama/core.py gama/__init__.py vsa/core.py vsa/__init__.py ukidss/core.py ukidss/__init__.py 
solarsystem/imcce/__init__.py solarsystem/imcce/skybot/__init__.py solarsystem/imcce/miriade/__init__.py solarsystem/jpl/__init__.py solarsystem/jpl/horizons/__init__.py solarsystem/jpl/sbdb/__init__.py solarsystem/__init__.py solarsystem/mpc/__init__.py exceptions.py __init__.py

No flags found

Use flags to group coverage reports by test type, project and/or folders.
Then set up custom commit statuses and notifications for each flag.

e.g., #unittest #integration

#production #enterprise

#frontend #backend

Learn more about Codecov Flags here.


@@ -51,8 +51,10 @@
Loading
51 51
           'TableList',
52 52
           'suppress_vo_warnings',
53 53
           'validate_email',
54 +
           'ASTROPY_LT_4_0',
54 55
           'ASTROPY_LT_4_1']
55 56
57 +
ASTROPY_LT_4_0 = not minversion('astropy', '4.0')
56 58
ASTROPY_LT_4_1 = not minversion('astropy', '4.1')
57 59
58 60
@@ -389,22 +391,17 @@
Loading
389 391
            If the system is unable to create a hardlink, the file will be
390 392
            copied to the target location.
391 393
        """
392 -
        from warnings import warn
393 -
394 394
        self.get_fits()
395 +
        target_key = str(self._target)
395 396
396 -
        try:
397 -
            dldir, urlmapfn = aud._get_download_cache_locs()
398 -
        except (IOError, OSError) as e:
399 -
            msg = 'Remote data cache could not be accessed due to '
400 -
            estr = '' if len(e.args) < 1 else (': ' + str(e))
401 -
            warn(aud.CacheMissingWarning(msg + e.__class__.__name__ + estr))
402 -
403 -
        with _open_shelve(urlmapfn, True) as url2hash:
404 -
            if str(self._target) in url2hash:
405 -
                target = url2hash[str(self._target)]
406 -
            else:
397 +
        # There has been some internal refactoring in astropy.utils.data
398 +
        # so we do this check. Update when minimum required astropy changes.
399 +
        if ASTROPY_LT_4_0:
400 +
            if not aud.is_url_in_cache(target_key):
407 401
                raise IOError("Cached file not found / does not exist.")
402 +
            target = aud.download_file(target_key, cache=True)
403 +
        else:
404 +
            target = aud.download_file(target_key, cache=True, sources=[])
408 405
409 406
        if link_cache == 'hard':
410 407
            try:
@@ -470,22 +467,3 @@
Loading
470 467
    """
471 468
    tables = votable.parse(six.BytesIO(content), pedantic=False)
472 469
    return tables
473 -
474 -
475 -
def _open_shelve(shelffn, withclosing=False):
476 -
    """
477 -
    Opens a shelf file.  If ``withclosing`` is True, it will be opened with
478 -
    closing, allowing use like:
479 -
480 -
        with _open_shelve('somefile',True) as s:
481 -
            ...
482 -
    """
483 -
    import shelve
484 -
    import contextlib
485 -
486 -
    shelf = shelve.open(shelffn, protocol=2)
487 -
488 -
    if withclosing:
489 -
        return contextlib.closing(shelf)
490 -
    else:
491 -
        return shelf

@@ -4,17 +4,27 @@
Loading
4 4
For questions, contact ooberdorf@gemini.edu
5 5
"""
6 6
7 +
import os
8 +
7 9
from datetime import date
8 10
11 +
from astropy import log
9 12
from astropy import units
10 13
from astropy.table import Table, MaskedColumn
11 14
12 15
from astroquery.gemini.urlhelper import URLHelper
13 16
import numpy as np
14 17
15 -
from ..query import BaseQuery
18 +
import logging
19 +
20 +
from ..query import BaseQuery, QueryWithLogin
16 21
from ..utils.class_or_instance import class_or_instance
17 22
from . import conf
23 +
from ..exceptions import AuthenticationWarning
24 +
25 +
26 +
logger = logging.getLogger(__name__)
27 +
18 28
19 29
__all__ = ['Observations', 'ObservationsClass']  # specifies what to import
20 30
@@ -92,7 +102,7 @@
Loading
92 102
]
93 103
94 104
95 -
class ObservationsClass(BaseQuery):
105 +
class ObservationsClass(QueryWithLogin):
96 106
97 107
    server = conf.server
98 108
    url_helper = URLHelper(server)
@@ -106,6 +116,27 @@
Loading
106 116
        """
107 117
        super().__init__()
108 118
119 +
    def _login(self, username, password):
120 +
        """
121 +
        Login to the Gemini Archive website.
122 +
123 +
        This method will authenticate the session as a particular user.  This may give you access
124 +
        to additional information or access based on your credentials
125 +
126 +
        Parameters
127 +
        ----------
128 +
        username : str
129 +
            The username to login as
130 +
        password : str
131 +
            The password for the given account
132 +
        """
133 +
        params = dict(username=username, password=password)
134 +
        r = self._session.request('POST', 'https://archive.gemini.edu/login/', params=params)
135 +
        if b'<P>Welcome, you are sucessfully logged in' not in r.content:
136 +
            logger.error('Unable to login, please check your credentials')
137 +
            return False
138 +
        return True
139 +
109 140
    @class_or_instance
110 141
    def query_region(self, coordinates, radius=0.3*units.deg):
111 142
        """
@@ -390,6 +421,21 @@
Loading
390 421
        js = response.json()
391 422
        return _gemini_json_to_table(js)
392 423
424 +
    def get_file(self, filename, *, download_dir='.', timeout=None):
425 +
        """
426 +
        Download the requested file to the current directory
427 +
428 +
        filename : str
429 +
            Name of the file to download
430 +
        download_dir : str, optional
431 +
            Name of the directory to download to
432 +
        timeout : int, optional
433 +
            Timeout of the request in milliseconds
434 +
        """
435 +
        url = "https://archive.gemini.edu/file/%s" % filename
436 +
        local_filepath = os.path.join(download_dir, filename)
437 +
        self._download_file(url=url, local_filepath=local_filepath, timeout=timeout)
438 +
393 439
394 440
def _gemini_json_to_table(json):
395 441
    """

@@ -11,6 +11,8 @@
Loading
11 11
12 12
from astropy.config import paths
13 13
14 +
from . import conf
15 +
14 16
15 17
def data_path(filename: str):
16 18
    """
@@ -32,7 +34,7 @@
Loading
32 34
    return os.path.join(data_dir, filename)
33 35
34 36
35 -
def get_json_species_ids(outfile='splat-species.json'):
37 +
def get_json_species_ids(outfile='splat-species.json', base_url=conf.base_url):
36 38
    """
37 39
    Uses BeautifulSoup to scrape the NRAO Splatalogue species
38 40
    selector form, and caches the result as JSON. The file
@@ -50,7 +52,7 @@
Loading
50 52
    """
51 53
    import bs4
52 54
53 -
    result = requests.get('https://www.cv.nrao.edu/php/splat/b.php')
55 +
    result = requests.get(f'{base_url}/b.php')
54 56
    page = bs4.BeautifulSoup(result.content, 'html5lib')
55 57
    # The ID needs to be checked periodically if Splatalogue is updated
56 58
    sid = page.findAll('select', attrs={'id': 'speciesselectbox'})[0]

@@ -35,8 +35,8 @@
Loading
35 35
        object_name : str
36 36
            The target you want radial velocities data
37 37
38 -
        Return
39 -
        ------
38 +
        Returns
39 +
        -------
40 40
        response : a ``requests.Response`` from DACE
41 41
        """
42 42
        return self._request("GET", ''.join([self.__DACE_URL, self.__RADIAL_VELOCITIES_ENDPOINT, object_name]),

@@ -19,8 +19,11 @@
Loading
19 19
    slap_url = _config.ConfigItem(
20 20
        'https://find.nrao.edu/splata-slap/slap',
21 21
        'Splatalogue SLAP interface URL (not used).')
22 +
    base_url = 'https://splatalogue.online'
22 23
    query_url = _config.ConfigItem(
23 -
        'https://www.cv.nrao.edu/php/splat/c_export.php',
24 +
        f'{base_url}/c_export.php',
25 +
        # defunct as of Sep 4, 2020?  (keeping because this is likely the true host)
26 +
        # 'https://www.cv.nrao.edu/php/splat/c_export.php',
24 27
        'Splatalogue web interface URL.')
25 28
    timeout = _config.ConfigItem(
26 29
        60,

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Everything is accounted for!

No changes detected that need to be reviewed.
What changes does Codecov check for?
Lines, not adjusted in diff, that have changed coverage data.
Files that introduced coverage data that had none before.
Files that have missing coverage data that once were tracked.

37 Commits

Hiding 1 contextual commit
Hiding 7 contextual commits
+71
+67
+4
Hiding 1 contextual commit Hiding 1 contextual commit
+2
+2
Hiding 4 contextual commits
+16
+7
+9
-11
-8
-3
Hiding 2 contextual commits Hiding 4 contextual commits
+11
+8
+3
Hiding 3 contextual commits
+3 Files
+220
+196
+24
Hiding 2 contextual commits
-3 Files
-231
-204
-27
Pull Request Base Commit
Files Coverage
astroquery 0.11% 64.03%
Project Totals (200 files) 64.03%
Loading