simonw / datasette

@@ -428,8 +428,10 @@
Loading
428 428
        )
429 429
        os.chdir(datasette_dir)
430 430
        if metadata_content:
431 -
            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
432 -
        open("Dockerfile", "w").write(dockerfile)
431 +
            with open("metadata.json", "w") as fp:
432 +
                fp.write(json.dumps(metadata_content, indent=2))
433 +
        with open("Dockerfile", "w") as fp:
434 +
            fp.write(dockerfile)
433 435
        for path, filename in zip(file_paths, file_names):
434 436
            link_or_copy(path, os.path.join(datasette_dir, filename))
435 437
        if template_dir:

@@ -212,7 +212,7 @@
Loading
212 212
            and (config_dir / "inspect-data.json").exists()
213 213
            and not inspect_data
214 214
        ):
215 -
            inspect_data = json.load((config_dir / "inspect-data.json").open())
215 +
            inspect_data = json.loads((config_dir / "inspect-data.json").read_text())
216 216
            if immutables is None:
217 217
                immutable_filenames = [i["file"] for i in inspect_data.values()]
218 218
                immutables = [
@@ -269,7 +269,7 @@
Loading
269 269
        if config_dir and (config_dir / "config.json").exists():
270 270
            raise StartupError("config.json should be renamed to settings.json")
271 271
        if config_dir and (config_dir / "settings.json").exists() and not config:
272 -
            config = json.load((config_dir / "settings.json").open())
272 +
            config = json.loads((config_dir / "settings.json").read_text())
273 273
        self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
274 274
        self.renderers = {}  # File extension -> (renderer, can_render) functions
275 275
        self.version_note = version_note
@@ -450,11 +450,10 @@
Loading
450 450
451 451
    def app_css_hash(self):
452 452
        if not hasattr(self, "_app_css_hash"):
453 -
            self._app_css_hash = hashlib.sha1(
454 -
                open(os.path.join(str(app_root), "datasette/static/app.css"))
455 -
                .read()
456 -
                .encode("utf8")
457 -
            ).hexdigest()[:6]
453 +
            with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
454 +
                self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
455 +
                    :6
456 +
                ]
458 457
        return self._app_css_hash
459 458
460 459
    async def get_canned_queries(self, database_name, actor):

@@ -125,13 +125,13 @@
Loading
125 125
@sqlite_extensions
126 126
def inspect(files, inspect_file, sqlite_extensions):
127 127
    app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
128 -
    if inspect_file == "-":
129 -
        out = sys.stdout
130 -
    else:
131 -
        out = open(inspect_file, "w")
132 128
    loop = asyncio.get_event_loop()
133 129
    inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
134 -
    out.write(json.dumps(inspect_data, indent=2))
130 +
    if inspect_file == "-":
131 +
        sys.stdout.write(json.dumps(inspect_data, indent=2))
132 +
    else:
133 +
        with open(inspect_file, "w") as fp:
134 +
            fp.write(json.dumps(inspect_data, indent=2))
135 135
136 136
137 137
async def inspect_(files, sqlite_extensions):
@@ -475,7 +475,8 @@
Loading
475 475
476 476
    inspect_data = None
477 477
    if inspect_file:
478 -
        inspect_data = json.load(open(inspect_file))
478 +
        with open(inspect_file) as fp:
479 +
            inspect_data = json.load(fp)
479 480
480 481
    metadata_data = None
481 482
    if metadata:

@@ -171,9 +171,11 @@
Loading
171 171
        os.chdir(tmp.name)
172 172
173 173
        if metadata_content:
174 -
            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
174 +
            with open("metadata.json", "w") as fp:
175 +
                fp.write(json.dumps(metadata_content, indent=2))
175 176
176 -
        open("runtime.txt", "w").write("python-3.8.7")
177 +
        with open("runtime.txt", "w") as fp:
178 +
            fp.write("python-3.8.7")
177 179
178 180
        if branch:
179 181
            install = [
@@ -182,11 +184,11 @@
Loading
182 184
        else:
183 185
            install = ["datasette"] + list(install)
184 186
185 -
        open("requirements.txt", "w").write("\n".join(install))
187 +
        with open("requirements.txt", "w") as fp:
188 +
            fp.write("\n".join(install))
186 189
        os.mkdir("bin")
187 -
        open("bin/post_compile", "w").write(
188 -
            "datasette inspect --inspect-file inspect-data.json"
189 -
        )
190 +
        with open("bin/post_compile", "w") as fp:
191 +
            fp.write("datasette inspect --inspect-file inspect-data.json")
190 192
191 193
        extras = []
192 194
        if template_dir:
@@ -218,7 +220,8 @@
Loading
218 220
        procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format(
219 221
            quoted_files=quoted_files, extras=" ".join(extras)
220 222
        )
221 -
        open("Procfile", "w").write(procfile_cmd)
223 +
        with open("Procfile", "w") as fp:
224 +
            fp.write(procfile_cmd)
222 225
223 226
        for path, filename in zip(file_paths, file_names):
224 227
            link_or_copy(path, os.path.join(tmp.name, filename))

@@ -141,9 +141,11 @@
Loading
141 141
            if show_files:
142 142
                if os.path.exists("metadata.json"):
143 143
                    print("=== metadata.json ===\n")
144 -
                    print(open("metadata.json").read())
144 +
                    with open("metadata.json") as fp:
145 +
                        print(fp.read())
145 146
                print("\n==== Dockerfile ====\n")
146 -
                print(open("Dockerfile").read())
147 +
                with open("Dockerfile") as fp:
148 +
                    print(fp.read())
147 149
                print("\n====================\n")
148 150
149 151
            image_id = f"gcr.io/{project}/{name}"
Files Coverage
datasette 91.52%
Project Totals (34 files) 91.52%
1
coverage:
2
  status:
3
    project:
4
      default:
5
        informational: true
6
    patch:
7
      default:
8
        informational: true
Sunburst
The inner-most circle is the entire project, moving away from the center are folders then, finally, a single file. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project. Proceeding outward are folders and, finally, individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading