simonw / datasette

@@ -74,7 +74,7 @@
Loading
74 74
            raise Forbidden(action)
75 75
76 76
    async def check_permissions(self, request, permissions):
77 -
        "permissions is a list of (action, resource) tuples or 'action' strings"
77 +
        """permissions is a list of (action, resource) tuples or 'action' strings"""
78 78
        for permission in permissions:
79 79
            if isinstance(permission, str):
80 80
                action = permission
@@ -280,7 +280,7 @@
Loading
280 280
        except (sqlite3.OperationalError, InvalidSql) as e:
281 281
            raise DatasetteError(str(e), title="Invalid SQL", status=400)
282 282
283 -
        except (sqlite3.OperationalError) as e:
283 +
        except sqlite3.OperationalError as e:
284 284
            raise DatasetteError(str(e))
285 285
286 286
        except DatasetteError:
@@ -451,7 +451,7 @@
Loading
451 451
        except (sqlite3.OperationalError, InvalidSql) as e:
452 452
            raise DatasetteError(str(e), title="Invalid SQL", status=400)
453 453
454 -
        except (sqlite3.OperationalError) as e:
454 +
        except sqlite3.OperationalError as e:
455 455
            raise DatasetteError(str(e))
456 456
457 457
        except DatasetteError:

@@ -71,7 +71,7 @@
Loading
71 71
72 72
73 73
def urlsafe_components(token):
74 -
    "Splits token on commas and URL decodes each component"
74 +
    """Splits token on commas and URL decodes each component"""
75 75
    return [urllib.parse.unquote_plus(b) for b in token.split(",")]
76 76
77 77
@@ -451,7 +451,7 @@
Loading
451 451
452 452
453 453
def detect_primary_keys(conn, table):
454 -
    " Figure out primary keys for a table. "
454 +
    """Figure out primary keys for a table."""
455 455
    columns = table_column_details(conn, table)
456 456
    pks = [column for column in columns if column.is_pk]
457 457
    pks.sort(key=lambda column: column.is_pk)
@@ -521,7 +521,7 @@
Loading
521 521
522 522
523 523
def detect_fts(conn, table):
524 -
    "Detect if table has a corresponding FTS virtual table and return it"
524 +
    """Detect if table has a corresponding FTS virtual table and return it"""
525 525
    rows = conn.execute(detect_fts_sql(table)).fetchall()
526 526
    if len(rows) == 0:
527 527
        return None
@@ -620,7 +620,7 @@
Loading
620 620
621 621
622 622
def is_url(value):
623 -
    "Must start with http:// or https:// and contain JUST a URL"
623 +
    """Must start with http:// or https:// and contain JUST a URL"""
624 624
    if not isinstance(value, str):
625 625
        return False
626 626
    if not value.startswith("http://") and not value.startswith("https://"):
@@ -863,14 +863,14 @@
Loading
863 863
        return len(self._data)
864 864
865 865
    def get(self, name, default=None):
866 -
        "Return first value in the list, if available"
866 +
        """Return first value in the list, if available"""
867 867
        try:
868 868
            return self._data.get(name)[0]
869 869
        except (KeyError, TypeError):
870 870
            return default
871 871
872 872
    def getlist(self, name):
873 -
        "Return full list"
873 +
        """Return full list"""
874 874
        return self._data.get(name) or []
875 875
876 876
@@ -967,7 +967,7 @@
Loading
967 967
968 968
969 969
async def check_visibility(datasette, actor, action, resource, default=True):
970 -
    "Returns (visible, private) - visible = can you see it, private = can others see it too"
970 +
    """Returns (visible, private) - visible = can you see it, private = can others see it too"""
971 971
    visible = await datasette.permission_allowed(
972 972
        actor,
973 973
        action,
@@ -975,7 +975,7 @@
Loading
975 975
        default=default,
976 976
    )
977 977
    if not visible:
978 -
        return (False, False)
978 +
        return False, False
979 979
    private = not await datasette.permission_allowed(
980 980
        None,
981 981
        action,
@@ -986,7 +986,7 @@
Loading
986 986
987 987
988 988
def resolve_env_secrets(config, environ):
989 -
    'Create copy that recursively replaces {"$env": "NAME"} with values from environ'
989 +
    """Create copy that recursively replaces {"$env": "NAME"} with values from environ"""
990 990
    if isinstance(config, dict):
991 991
        if list(config.keys()) == ["$env"]:
992 992
            return environ.get(list(config.values())[0])
@@ -1023,7 +1023,7 @@
Loading
1023 1023
1024 1024
1025 1025
async def initial_path_for_datasette(datasette):
1026 -
    "Return suggested path for opening this Datasette, based on number of DBs and tables"
1026 +
    """Return suggested path for opening this Datasette, based on number of DBs and tables"""
1027 1027
    databases = dict([p for p in datasette.databases.items() if p[0] != "_internal"])
1028 1028
    if len(databases) == 1:
1029 1029
        db_name = next(iter(databases.keys()))

@@ -429,7 +429,7 @@
Loading
429 429
            return m
430 430
431 431
    def plugin_config(self, plugin_name, database=None, table=None, fallback=True):
432 -
        "Return config for plugin, falling back from specified database/table"
432 +
        """Return config for plugin, falling back from specified database/table"""
433 433
        plugins = self.metadata(
434 434
            "plugins", database=database, table=table, fallback=fallback
435 435
        )
@@ -523,7 +523,7 @@
Loading
523 523
            return []
524 524
525 525
    async def permission_allowed(self, actor, action, resource=None, default=False):
526 -
        "Check permissions using the permissions_allowed plugin hook"
526 +
        """Check permissions using the permissions_allowed plugin hook"""
527 527
        result = None
528 528
        for check in pm.hook.permission_allowed(
529 529
            datasette=self,
@@ -570,7 +570,7 @@
Loading
570 570
        )
571 571
572 572
    async def expand_foreign_keys(self, database, table, column, values):
573 -
        "Returns dict mapping (column, value) -> label"
573 +
        """Returns dict mapping (column, value) -> label"""
574 574
        labeled_fks = {}
575 575
        db = self.databases[database]
576 576
        foreign_keys = await db.foreign_keys_for_table(table)
@@ -613,7 +613,7 @@
Loading
613 613
        return url
614 614
615 615
    def _register_custom_units(self):
616 -
        "Register any custom units defined in the metadata.json with Pint"
616 +
        """Register any custom units defined in the metadata.json with Pint"""
617 617
        for unit in self.metadata("custom_units") or []:
618 618
            ureg.define(unit)
619 619
@@ -730,7 +730,7 @@
Loading
730 730
        return {"actor": request.actor}
731 731
732 732
    def table_metadata(self, database, table):
733 -
        "Fetch table-specific metadata."
733 +
        """Fetch table-specific metadata."""
734 734
        return (
735 735
            (self.metadata("databases") or {})
736 736
            .get(database, {})
@@ -739,7 +739,7 @@
Loading
739 739
        )
740 740
741 741
    def _register_renderers(self):
742 -
        """ Register output renderers which output data in custom formats. """
742 +
        """Register output renderers which output data in custom formats."""
743 743
        # Built-in renderers
744 744
        self.renderers["json"] = (json_renderer, lambda: True)
745 745
@@ -880,7 +880,7 @@
Loading
880 880
        return output
881 881
882 882
    def app(self):
883 -
        "Returns an ASGI app function that serves the whole of Datasette"
883 +
        """Returns an ASGI app function that serves the whole of Datasette"""
884 884
        routes = []
885 885
886 886
        for routes_to_add in pm.hook.register_routes():
@@ -1287,7 +1287,7 @@
Loading
1287 1287
    )
1288 1288
1289 1289
1290 -
_curly_re = re.compile(r"(\{.*?\})")
1290 +
_curly_re = re.compile(r"({.*?})")
1291 1291
1292 1292
1293 1293
def route_pattern_from_filepath(filepath):

@@ -27,7 +27,7 @@
Loading
27 27
28 28
29 29
def json_renderer(args, data, view_name):
30 -
    """ Render a response as JSON """
30 +
    """Render a response as JSON"""
31 31
    status_code = 200
32 32
    # Handle the _json= parameter which may modify data["rows"]
33 33
    json_cols = []

@@ -59,12 +59,10 @@
Loading
59 59
60 60
    @property
61 61
    def headers(self):
62 -
        return dict(
63 -
            [
64 -
                (k.decode("latin-1").lower(), v.decode("latin-1"))
65 -
                for k, v in self.scope.get("headers") or []
66 -
            ]
67 -
        )
62 +
        return {
63 +
            k.decode("latin-1").lower(): v.decode("latin-1")
64 +
            for k, v in self.scope.get("headers") or []
65 +
        }
68 66
69 67
    @property
70 68
    def host(self):
@@ -115,7 +113,7 @@
Loading
115 113
116 114
    @classmethod
117 115
    def fake(cls, path_with_query_string, method="GET", scheme="http"):
118 -
        "Useful for constructing Request objects for tests"
116 +
        """Useful for constructing Request objects for tests"""
119 117
        path, _, query_string = path_with_query_string.partition("?")
120 118
        scope = {
121 119
            "http_version": "1.1",
@@ -167,9 +165,7 @@
Loading
167 165
168 166
    async def asgi_send(self, send):
169 167
        # Remove any existing content-type header
170 -
        headers = dict(
171 -
            [(k, v) for k, v in self.headers.items() if k.lower() != "content-type"]
172 -
        )
168 +
        headers = {k: v for k, v in self.headers.items() if k.lower() != "content-type"}
173 169
        headers["content-type"] = self.content_type
174 170
        await send(
175 171
            {
@@ -240,7 +236,7 @@
Loading
240 236
async def asgi_start(send, status, headers=None, content_type="text/plain"):
241 237
    headers = headers or {}
242 238
    # Remove any existing content-type header
243 -
    headers = dict([(k, v) for k, v in headers.items() if k.lower() != "content-type"])
239 +
    headers = {k: v for k, v in headers.items() if k.lower() != "content-type"}
244 240
    headers["content-type"] = content_type
245 241
    await send(
246 242
        {

@@ -152,7 +152,7 @@
Loading
152 152
153 153
@cli.group()
154 154
def publish():
155 -
    "Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"
155 +
    """Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"""
156 156
    pass
157 157
158 158
@@ -168,7 +168,7 @@
Loading
168 168
    help="Path to directory containing custom plugins",
169 169
)
170 170
def plugins(all, plugins_dir):
171 -
    "List currently available plugins"
171 +
    """List currently available plugins"""
172 172
    app = Datasette([], plugins_dir=plugins_dir)
173 173
    click.echo(json.dumps(app._plugins(all=all), indent=4))
174 174
@@ -244,7 +244,7 @@
Loading
244 244
    port,
245 245
    **extra_metadata,
246 246
):
247 -
    "Package specified SQLite files into a new datasette Docker container"
247 +
    """Package specified SQLite files into a new datasette Docker container"""
248 248
    if not shutil.which("docker"):
249 249
        click.secho(
250 250
            ' The package command requires "docker" to be installed and configured ',
@@ -284,7 +284,7 @@
Loading
284 284
    "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version"
285 285
)
286 286
def install(packages, upgrade):
287 -
    "Install Python packages - e.g. Datasette plugins - into the same environment as Datasette"
287 +
    """Install Python packages - e.g. Datasette plugins - into the same environment as Datasette"""
288 288
    args = ["pip", "install"]
289 289
    if upgrade:
290 290
        args += ["--upgrade"]
@@ -297,7 +297,7 @@
Loading
297 297
@click.argument("packages", nargs=-1, required=True)
298 298
@click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation")
299 299
def uninstall(packages, yes):
300 -
    "Uninstall Python packages (e.g. plugins) from the Datasette environment"
300 +
    """Uninstall Python packages (e.g. plugins) from the Datasette environment"""
301 301
    sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else [])
302 302
    run_module("pip", run_name="__main__")
303 303

@@ -162,10 +162,8 @@
Loading
162 162
                )
163 163
                num_distinct_values = len(distinct_values)
164 164
                if (
165 -
                    num_distinct_values
166 -
                    and num_distinct_values > 1
165 +
                    1 < num_distinct_values < row_count
167 166
                    and num_distinct_values <= facet_size
168 -
                    and num_distinct_values < row_count
169 167
                    # And at least one has n > 1
170 168
                    and any(r["n"] > 1 for r in distinct_values)
171 169
                ):

@@ -87,7 +87,7 @@
Loading
87 87
    async def display_columns_and_rows(
88 88
        self, database, table, description, rows, link_column=False, truncate_cells=0
89 89
    ):
90 -
        "Returns columns, rows for specified table - including fancy foreign key treatment"
90 +
        """Returns columns, rows for specified table - including fancy foreign key treatment"""
91 91
        db = self.ds.databases[database]
92 92
        table_metadata = self.ds.table_metadata(database, table)
93 93
        column_details = {col.name: col for col in await db.table_column_details(table)}
@@ -743,7 +743,7 @@
Loading
743 743
        # Pagination next link
744 744
        next_value = None
745 745
        next_url = None
746 -
        if len(rows) > page_size and page_size > 0:
746 +
        if 0 < page_size < len(rows):
747 747
            if is_view:
748 748
                next_value = int(_next or 0) + page_size
749 749
            else:

@@ -208,7 +208,7 @@
Loading
208 208
        self.ureg = ureg
209 209
210 210
    def lookups(self):
211 -
        "Yields (lookup, display, no_argument) pairs"
211 +
        """Yields (lookup, display, no_argument) pairs"""
212 212
        for filter in self._filters:
213 213
            yield filter.key, filter.display, filter.no_argument
214 214
@@ -233,7 +233,7 @@
Loading
233 233
        return f"where {s}"
234 234
235 235
    def selections(self):
236 -
        "Yields (column, lookup, value) tuples"
236 +
        """Yields (column, lookup, value) tuples"""
237 237
        for key, value in self.pairs:
238 238
            if "__" in key:
239 239
                column, lookup = key.rsplit("__", 1)
@@ -246,7 +246,7 @@
Loading
246 246
        return bool(self.pairs)
247 247
248 248
    def convert_unit(self, column, value):
249 -
        "If the user has provided a unit in the query, convert it into the column unit, if present."
249 +
        """If the user has provided a unit in the query, convert it into the column unit, if present."""
250 250
        if column not in self.units:
251 251
            return value
252 252

@@ -15,7 +15,7 @@
Loading
15 15
16 16
17 17
def inspect_hash(path):
18 -
    " Calculate the hash of a database, efficiently. "
18 +
    """Calculate the hash of a database, efficiently."""
19 19
    m = hashlib.sha256()
20 20
    with path.open("rb") as fp:
21 21
        while True:
@@ -28,14 +28,14 @@
Loading
28 28
29 29
30 30
def inspect_views(conn):
31 -
    " List views in a database. "
31 +
    """List views in a database."""
32 32
    return [
33 33
        v[0] for v in conn.execute('select name from sqlite_master where type = "view"')
34 34
    ]
35 35
36 36
37 37
def inspect_tables(conn, database_metadata):
38 -
    " List tables and their row counts, excluding uninteresting tables. "
38 +
    """List tables and their row counts, excluding uninteresting tables."""
39 39
    tables = {}
40 40
    table_names = [
41 41
        r["name"]

@@ -7,108 +7,108 @@
Loading
7 7
8 8
@hookspec
9 9
def startup(datasette):
10 -
    "Fires directly after Datasette first starts running"
10 +
    """Fires directly after Datasette first starts running"""
11 11
12 12
13 13
@hookspec
14 14
def asgi_wrapper(datasette):
15 -
    "Returns an ASGI middleware callable to wrap our ASGI application with"
15 +
    """Returns an ASGI middleware callable to wrap our ASGI application with"""
16 16
17 17
18 18
@hookspec
19 19
def prepare_connection(conn, database, datasette):
20 -
    "Modify SQLite connection in some way e.g. register custom SQL functions"
20 +
    """Modify SQLite connection in some way e.g. register custom SQL functions"""
21 21
22 22
23 23
@hookspec
24 24
def prepare_jinja2_environment(env):
25 -
    "Modify Jinja2 template environment e.g. register custom template tags"
25 +
    """Modify Jinja2 template environment e.g. register custom template tags"""
26 26
27 27
28 28
@hookspec
29 29
def extra_css_urls(template, database, table, columns, view_name, request, datasette):
30 -
    "Extra CSS URLs added by this plugin"
30 +
    """Extra CSS URLs added by this plugin"""
31 31
32 32
33 33
@hookspec
34 34
def extra_js_urls(template, database, table, columns, view_name, request, datasette):
35 -
    "Extra JavaScript URLs added by this plugin"
35 +
    """Extra JavaScript URLs added by this plugin"""
36 36
37 37
38 38
@hookspec
39 39
def extra_body_script(
40 40
    template, database, table, columns, view_name, request, datasette
41 41
):
42 -
    "Extra JavaScript code to be included in <script> at bottom of body"
42 +
    """Extra JavaScript code to be included in <script> at bottom of body"""
43 43
44 44
45 45
@hookspec
46 46
def extra_template_vars(
47 47
    template, database, table, columns, view_name, request, datasette
48 48
):
49 -
    "Extra template variables to be made available to the template - can return dict or callable or awaitable"
49 +
    """Extra template variables to be made available to the template - can return dict or callable or awaitable"""
50 50
51 51
52 52
@hookspec
53 53
def publish_subcommand(publish):
54 -
    "Subcommands for 'datasette publish'"
54 +
    """Subcommands for 'datasette publish'"""
55 55
56 56
57 57
@hookspec(firstresult=True)
58 58
def render_cell(value, column, table, database, datasette):
59 -
    "Customize rendering of HTML table cell values"
59 +
    """Customize rendering of HTML table cell values"""
60 60
61 61
62 62
@hookspec
63 63
def register_output_renderer(datasette):
64 -
    "Register a renderer to output data in a different format"
64 +
    """Register a renderer to output data in a different format"""
65 65
66 66
67 67
@hookspec
68 68
def register_facet_classes():
69 -
    "Register Facet subclasses"
69 +
    """Register Facet subclasses"""
70 70
71 71
72 72
@hookspec
73 73
def register_routes():
74 -
    "Register URL routes: return a list of (regex, view_function) pairs"
74 +
    """Register URL routes: return a list of (regex, view_function) pairs"""
75 75
76 76
77 77
@hookspec
78 78
def actor_from_request(datasette, request):
79 -
    "Return an actor dictionary based on the incoming request"
79 +
    """Return an actor dictionary based on the incoming request"""
80 80
81 81
82 82
@hookspec
83 83
def permission_allowed(datasette, actor, action, resource):
84 -
    "Check if actor is allowed to perfom this action - return True, False or None"
84 +
    """Check if actor is allowed to perfom this action - return True, False or None"""
85 85
86 86
87 87
@hookspec
88 88
def canned_queries(datasette, database, actor):
89 -
    "Return a dictonary of canned query definitions or an awaitable function that returns them"
89 +
    """Return a dictonary of canned query definitions or an awaitable function that returns them"""
90 90
91 91
92 92
@hookspec
93 93
def register_magic_parameters(datasette):
94 -
    "Return a list of (name, function) magic parameter functions"
94 +
    """Return a list of (name, function) magic parameter functions"""
95 95
96 96
97 97
@hookspec
98 98
def forbidden(datasette, request, message):
99 -
    "Custom response for a 403 forbidden error"
99 +
    """Custom response for a 403 forbidden error"""
100 100
101 101
102 102
@hookspec
103 103
def menu_links(datasette, actor):
104 -
    "Links for the navigation menu"
104 +
    """Links for the navigation menu"""
105 105
106 106
107 107
@hookspec
108 108
def table_actions(datasette, actor, database, table):
109 -
    "Links for the table actions menu"
109 +
    """Links for the table actions menu"""
110 110
111 111
112 112
@hookspec
113 113
def database_actions(datasette, actor, database):
114 -
    "Links for the database actions menu"
114 +
    """Links for the database actions menu"""
Files Coverage
datasette 91.55%
Project Totals (32 files) 91.55%
1
coverage:
2
  status:
3
    project:
4
      default:
5
        informational: true
6
    patch:
7
      default:
8
        informational: true
Sunburst
The inner-most circle is the entire project; moving away from the center are folders and then, finally, a single file. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project, proceeding downward through folders and finally individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading