.credo.exs .dialyzer_ignore.exs .formatter.exs LICENSE bench/xgit/repository/working_tree/parse_index_file/from_iodevice.exs config/config.exs config/dev.exs config/prod.exs config/test.exs coveralls.json lib/xgit.ex lib/xgit/commit.ex lib/xgit/config.ex lib/xgit/config_entry.ex lib/xgit/config_file.ex lib/xgit/content_source.ex lib/xgit/dir_cache.ex lib/xgit/file_content_source.ex lib/xgit/file_mode.ex lib/xgit/file_path.ex lib/xgit/object.ex lib/xgit/object_id.ex lib/xgit/object_type.ex lib/xgit/person_ident.ex lib/xgit/ref.ex lib/xgit/repository.ex lib/xgit/repository/in_memory.ex lib/xgit/repository/invalid_repository_error.ex lib/xgit/repository/on_disk.ex lib/xgit/repository/plumbing.ex lib/xgit/repository/storage.ex lib/xgit/repository/test/config_test.ex lib/xgit/repository/test/ref_test.ex lib/xgit/repository/working_tree.ex lib/xgit/tag.ex lib/xgit/tree.ex lib/xgit/util/comparison.ex lib/xgit/util/file_utils.ex lib/xgit/util/force_coverage.ex lib/xgit/util/nb.ex lib/xgit/util/observed_file.ex lib/xgit/util/parse_charlist.ex lib/xgit/util/parse_decimal.ex lib/xgit/util/parse_header.ex lib/xgit/util/shared_test_case.ex lib/xgit/util/trailing_hash_device.ex lib/xgit/util/unzip_stream.ex mix.exs mix.lock test/fixtures/LICENSE_blob.zip test/fixtures/test_content.zip test/support/folder_diff.ex test/support/not_valid.ex test/support/test/on_disk_repo_test_case.ex test/support/test/temp_dir_test_case.ex test/support/test/test_file_utils.ex test/test_helper.exs test/xgit/commit_test.exs test/xgit/config_entry_test.exs test/xgit/config_file_test.exs test/xgit/config_test.exs test/xgit/content_source_test.exs test/xgit/dir_cache/entry_test.exs test/xgit/dir_cache/from_iodevice_test.exs test/xgit/dir_cache/to_iodevice_test.exs test/xgit/dir_cache_test.exs test/xgit/file_content_source_test.exs test/xgit/file_mode_test.exs test/xgit/file_path_test.exs test/xgit/object_id_test.exs test/xgit/object_test.exs test/xgit/object_type_test.exs test/xgit/person_ident_test.exs test/xgit/ref_test.exs test/xgit/repository/default_working_tree_test.exs test/xgit/repository/in_memory/config_test.exs test/xgit/repository/in_memory/get_object_test.exs test/xgit/repository/in_memory/has_all_object_ids_test.exs test/xgit/repository/in_memory/put_loose_object_test.exs test/xgit/repository/in_memory/ref_test.exs test/xgit/repository/on_disk/config_test.exs test/xgit/repository/on_disk/create_test.exs test/xgit/repository/on_disk/get_object_test.exs test/xgit/repository/on_disk/has_all_object_ids_test.exs test/xgit/repository/on_disk/put_loose_object_test.exs test/xgit/repository/on_disk/ref_test.exs test/xgit/repository/on_disk_test.exs test/xgit/repository/plumbing/cat_file_commit_test.exs test/xgit/repository/plumbing/cat_file_tag_test.exs test/xgit/repository/plumbing/cat_file_test.exs test/xgit/repository/plumbing/cat_file_tree_test.exs test/xgit/repository/plumbing/commit_tree_test.exs test/xgit/repository/plumbing/delete_symbolic_ref_test.exs test/xgit/repository/plumbing/get_symbolic_ref_test.exs test/xgit/repository/plumbing/hash_object_test.exs test/xgit/repository/plumbing/ls_files_stage_test.exs test/xgit/repository/plumbing/put_symbolic_ref_test.exs test/xgit/repository/plumbing/read_tree_test.exs test/xgit/repository/plumbing/update_info_cache_info_test.exs test/xgit/repository/plumbing/update_ref_test.exs test/xgit/repository/plumbing/write_tree_test.exs test/xgit/repository/storage_test.exs test/xgit/repository/tag_test.exs test/xgit/repository/working_tree/dir_cache_test.exs 
test/xgit/repository/working_tree/read_tree_test.exs test/xgit/repository/working_tree/reset_dir_cache_test.exs test/xgit/repository/working_tree/update_dir_cache_test.exs test/xgit/repository/working_tree/write_tree_test.exs test/xgit/repository/working_tree_test.exs test/xgit/support/folder_diff_test.exs test/xgit/tag_test.exs test/xgit/tree/entry_test.exs test/xgit/tree_test.exs test/xgit/util/file_utils_test.exs test/xgit/util/nb_test.exs test/xgit/util/observed_file_test.exs test/xgit/util/parse_charlist_test.exs test/xgit/util/parse_decimal_test.exs test/xgit/util/parse_header_test.exs test/xgit/util/trailing_hash_device_test.exs test/xgit/util/unzip_stream_test.exs <<<<<< network # path=./.github/workflows/test-coverage.yml on: push jobs: test-coverage: runs-on: ubuntu-latest name: OTP ${{matrix.otp}} / Elixir ${{matrix.elixir}} strategy: matrix: otp: [21.3.8.10, 22.2] elixir: [1.8.2, 1.9.4, 1.10.0] exclude: - {otp: "21.3.8.10", elixir: "1.10.0"} env: MIX_ENV: test steps: - uses: actions/checkout@v1.0.0 - uses: actions/setup-elixir@v1.0.0 with: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - uses: actions/cache@v1 id: cache-mix-deps with: path: deps key: ${{matrix.otp}}-${{matrix.elixir}}-mix-${{hashFiles(format('{0}{1}', github.workspace, '/mix.exs'))}}-${{hashFiles(format('{0}{1}', github.workspace, '/mix.lock'))}} restore-keys: | ${{matrix.otp}}-${{matrix.elixir}}-mix- - run: mix deps.get if: steps.cache-mix-deps.outputs.cache-hit != 'true' - run: mix deps.compile if: steps.cache-mix-deps.outputs.cache-hit != 'true' - run: mix coveralls.json - name: Upload coverage to CodeCov uses: codecov/codecov-action@v1.0.3 with: token: ${{secrets.CODECOV_TOKEN}} <<<<<< EOF # path=./cover/excoveralls.json {"source_files":[{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,24,null,13,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,134,null,3,38,18,165,1,null,null,null,null,38,33,29,25,23,15,13,11,null,5,4,4,2,8,2,2,null,null,null,null,162,137,133,null,4,25,null,null,null,null,null,null,18,15,null,12,9,5,3,null,3,3,3,4,2,null,null,null,null,5,4,null,2,null,2,1,null,null,null,null,null,null,165,165,87,null,78,null,null,165,null,null,null,50,null,null,null,null,180,174,172,171,78,null,75,70,null,67,67,66,65,null,null,null,null,null,null,null,6,2,1,93,8,3,1,1,null,null,null,1,null,null,null,3,null,null,1033,null,2,null,172,null,69,null,null,9,null,null,81,null,81,81,null,81,null,81,null,81,null,81,null,81,25,25,11,6,null,null,null,null,81,25,56,null,null,null,56,null,null,14,null,59,null,null,6,null,null,null,null,36,333
,34,422,32,31,null,27,26,null,2,2,1,4,1,null,null,null,null,null,null,null,12,null,null,3,null,2,1,null],"name":"lib/xgit/object.ex","source":"# Copyright (C) 2008-2010, Google Inc.\n# Copyright (C) 2008, Shawn O. Pearce \n# and other copyright owners as documented in the project's IP log.\n#\n# Elixir adaptation from jgit file:\n# org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java\n#\n# Copyright (C) 2019, Eric Scouten \n#\n# This program and the accompanying materials are made available\n# under the terms of the Eclipse Distribution License v1.0 which\n# accompanies this distribution, is reproduced below, and is\n# available at http://www.eclipse.org/org/documents/edl-v10.php\n#\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or\n# without modification, are permitted provided that the following\n# conditions are met:\n#\n# - Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#\n# - Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n#\n# - Neither the name of the Eclipse Foundation, Inc. nor the\n# names of its contributors may be used to endorse or promote\n# products derived from this software without specific prior\n# written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND\n# CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,\n# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\ndefmodule Xgit.Object do\n @moduledoc ~S\"\"\"\n Describes a single object stored (or about to be stored) in a git repository.\n\n This struct is constructed, modified, and shared as a working description of\n how to find and describe an object before it gets written to a repository.\n \"\"\"\n use Xgit.ObjectType\n\n alias Xgit.ContentSource\n alias Xgit.FileMode\n alias Xgit.FilePath\n alias Xgit.ObjectId\n alias Xgit.PersonIdent\n alias Xgit.Util.ParseCharlist\n alias Xgit.Util.ParseDecimal\n\n import Xgit.Util.ForceCoverage\n import Xgit.Util.ParseHeader, only: [next_header: 1]\n\n @typedoc ~S\"\"\"\n This struct describes a single object stored or about to be stored in a git\n repository.\n\n ## Struct Members\n\n * `:type`: the object's type (`:blob`, `:tree`, `:commit`, or `:tag`)\n * `:content`: how to obtain the content (see `Xgit.ContentSource`)\n * `:size`: size (in bytes) of the object or `:unknown`\n * `:id`: object ID (40 chars hex) of the object or `:unknown`\n \"\"\"\n @type t :: %__MODULE__{\n type: ObjectType.t(),\n content: ContentSource.t(),\n size: non_neg_integer() | :unknown,\n id: ObjectId.t() | :unknown\n }\n\n @enforce_keys [:type, :content]\n defstruct [:type, :content, size: :unknown, id: :unknown]\n\n @doc ~S\"\"\"\n Return `true` if the struct describes 
a valid object.\n\n _IMPORTANT:_ This validation _only_ verifies that the struct itself is valid.\n It does not inspect the content of the object. That check can be performed by\n `check/2`.\n \"\"\"\n @spec valid?(object :: any) :: boolean\n def valid?(object)\n\n def valid?(%__MODULE__{type: type, content: content, size: size, id: id})\n when is_object_type(type) and is_integer(size) and size >= 0,\n do: ObjectId.valid?(id) && content != nil && ContentSource.impl_for(content) != nil\n\n def valid?(_), do: cover(false)\n\n @typedoc ~S\"\"\"\n Error codes which can be returned by `check/2`.\n \"\"\"\n @type check_reason ::\n :invalid_type\n | :no_tree_header\n | :invalid_tree\n | :invalid_parent\n | :no_author\n | :no_committer\n | :no_object_header\n | :invalid_object\n | :no_type_header\n | :invalid_tagger\n | :bad_date\n | :bad_email\n | :missing_email\n | :missing_space_before_date\n | :bad_time_zone\n | :invalid_file_mode\n | :truncated_in_name\n | :duplicate_entry_names\n | :incorrectly_sorted\n | :truncated_in_object_id\n | :null_sha1\n | :invalid_mode\n\n @doc ~S\"\"\"\n Verify that a proposed object is valid.\n\n This function performs a detailed check on the _content_ of the object.\n For a simpler verification that the `Object` struct is _itself_\n valid, see `valid?/1`.\n\n Verifications made by this function only check that the fields of an object are\n formatted correctly. The object ID checksum of the object is not verified, and\n connectivity links between objects are also not verified. It's assumed that\n the caller can provide both of these validations on its own.\n\n ## Options\n\n By default, this function will only enforce Posix file name restrictions.\n\n * `:macosx?`: `true` to also enforce Mac OS X path name restrictions\n * `:windows?`: `true` to also enforce Windows path name restrictions\n\n ## Return Value\n\n `:ok` if the object is successfully validated.\n\n `{:error, :invalid_type}` if the object's type is unknown.\n\n `{:error, :no_tree_header}` if the object is a commit but does not contain\n a valid tree header.\n\n `{:error, :invalid_tree}` if the object is a commit but the tree object ID\n is invalid.\n\n `{:error, :invalid_parent}` if the object is a commit but one of the `parent`\n headers is invalid.\n\n `{:error, :no_author}` if the object is a commit but there is no `author` header.\n\n `{:error, :no_committer}` if the object is a commit but there is no `committer` header.\n\n `{:error, :no_object_header}` if the object is a tag but there is no `object` header.\n\n `{:error, :invalid_object}` if the object is a tag but the object ID is invalid.\n\n `{:error, :no_type_header}` if the object is a tag but there is no `type` header.\n\n `{:error, :invalid_tagger}` if the object is a tag but one of the `tagger` headers\n is invalid.\n\n `{:error, :bad_date}` if the object is a tag or a commit but has a malformed date entry.\n\n `{:error, :bad_email}` if the object is a tag or a commit but has a malformed e-mail address.\n\n `{:error, :missing_email}` if the object is a tag or a commit but has a missing e-mail address\n where one is expected.\n\n `{:error, :missing_space_before_date}` if the object is a tag or a commit but\n has no space preceding the place where a date is expected.\n\n `{:error, :bad_time_zone}` if the object is a tag or a commit but has a malformed\n time zone entry.\n\n `{:error, :invalid_file_mode}` if the object is a tree but one of the file modes is invalid.\n\n `{:error, :truncated_in_name}` if the object is a tree but one of the 
file names is incomplete.\n\n `{:error, :duplicate_entry_names}` if the object is a tree and contains duplicate\n entry names.\n\n `{:error, :incorrectly_sorted}` if the object is a tree and the entries are not\n in alphabetical order.\n\n `{:error, :truncated_in_object_id}` if the object is a tree and one of the object IDs\n is invalid.\n\n `{:error, :null_sha1}` if the object is a tree and one of the object IDs is all zeros.\n\n `{:error, :invalid_mode}` if the object is a tree and one of the file modes is incomplete.\n\n See also error responses from `Xgit.FilePath.check_path/2` and\n `Xgit.FilePath.check_path_segment/2`.\n \"\"\"\n @spec check(object :: t(), windows?: boolean, macosx?: boolean) ::\n :ok\n | {:error, reason :: check_reason}\n | {:error, reason :: FilePath.check_path_reason()}\n | {:error, reason :: FilePath.check_path_segment_reason()}\n def check(object, opts \\\\ [])\n\n def check(%__MODULE__{type: :blob}, _opts), do: cover(:ok)\n def check(%__MODULE__{type: :commit} = object, _opts), do: check_commit(object)\n def check(%__MODULE__{type: :tag} = object, _opts), do: check_tag(object)\n def check(%__MODULE__{type: :tree} = object, opts), do: check_tree(object, opts)\n def check(%__MODULE__{type: _type}, _opts), do: cover({:error, :invalid_type})\n\n # -- commit specifics --\n\n defp check_commit(%__MODULE__{content: data}) when is_list(data) do\n with {:tree, {'tree', tree_id, data}} <- {:tree, next_header(data)},\n {:tree_id, {_tree_id_str, []}} <- {:tree_id, ObjectId.from_hex_charlist(tree_id)},\n {:parents, data} when is_list(data) <- {:parents, check_commit_parents(data)},\n {:author, {'author', author, data}} <- {:author, next_header(data)},\n {:author_id, :ok} <- {:author_id, check_person_ident(author)},\n {:committer, {'committer', committer, _data}} <- {:committer, next_header(data)},\n {:committer_id, :ok} <- {:committer_id, check_person_ident(committer)} do\n cover :ok\n else\n {:tree, _} -> cover {:error, :no_tree_header}\n {:tree_id, _} -> cover {:error, :invalid_tree}\n {:parents, _} -> cover {:error, :invalid_parent}\n {:author, _} -> cover {:error, :no_author}\n {:author_id, why} when is_atom(why) -> cover {:error, why}\n {:committer, _} -> cover {:error, :no_committer}\n {:committer_id, why} when is_atom(why) -> cover {:error, why}\n end\n end\n\n defp check_commit_parents(data) do\n with {'parent', parent_id, next_data} <- next_header(data),\n {:parent_id, {_parent_id, []}} <- {:parent_id, ObjectId.from_hex_charlist(parent_id)} do\n check_commit_parents(next_data)\n else\n {:parent_id, _} -> cover nil\n _ -> cover data\n end\n end\n\n # -- tag specifics --\n\n defp check_tag(%__MODULE__{content: data}) when is_list(data) do\n with {:object, {'object', object_id, data}} <- {:object, next_header(data)},\n {:object_id, {object_id, []}} when is_binary(object_id) <-\n {:object_id, ObjectId.from_hex_charlist(object_id)},\n {:type, {'type', _type, data}} <- {:type, next_header(data)},\n {:tag, {'tag', _tag, data}} <- {:tag, next_header(data)},\n {:tagger, data} when is_list(data) <- {:tagger, maybe_match_tagger(data)} do\n cover :ok\n else\n {:object, _} -> cover {:error, :no_object_header}\n {:object_id, _} -> cover {:error, :invalid_object}\n {:type, _} -> cover {:error, :no_type_header}\n {:tag, _} -> cover {:error, :no_tag_header}\n {:tagger, _} -> cover {:error, :invalid_tagger}\n end\n end\n\n defp maybe_match_tagger(data) do\n with {'tagger', tagger, next} when next != data <- next_header(data),\n {:valid_person_ident, %PersonIdent{}} <-\n 
{:valid_person_ident, PersonIdent.from_byte_list(tagger)} do\n cover next\n else\n {:valid_person_ident, _} -> cover nil\n _ -> cover data\n end\n end\n\n # -- tree specifics --\n\n defp check_tree(%__MODULE__{content: data}, opts) when is_list(data) and is_list(opts) do\n maybe_normalized_paths =\n if Keyword.get(opts, :windows?) || Keyword.get(opts, :macosx?) do\n MapSet.new()\n else\n cover nil\n end\n\n check_next_tree_entry(data, maybe_normalized_paths, [], FileMode.regular_file(), opts)\n end\n\n defp check_next_tree_entry([], _maybe_normalized_paths, _previous_name, _previous_mode, _opts),\n do: cover(:ok)\n\n defp check_next_tree_entry(data, maybe_normalized_paths, previous_name, previous_mode, opts) do\n # Scan one entry then recurse to scan remaining entries.\n\n with {:file_mode, {:ok, file_mode, data}} <- {:file_mode, check_file_mode(data, 0)},\n {:file_mode, true} <- {:file_mode, FileMode.valid?(file_mode)},\n {:path_split, {path_segment, [0 | data]}} <- {:path_split, path_and_object_id(data)},\n {:path_valid, :ok} <- {:path_valid, FilePath.check_path_segment(path_segment, opts)},\n {:duplicate, false} <-\n {:duplicate, maybe_mapset_member?(maybe_normalized_paths, path_segment, opts)},\n {:duplicate, false} <- {:duplicate, duplicate_name?(path_segment, data)},\n {:sorted, true} <-\n {:sorted, correctly_sorted?(previous_name, previous_mode, path_segment, file_mode)},\n {raw_object_id, data} <- Enum.split(data, 20),\n {:object_id_length, 20} <- {:object_id_length, Enum.count(raw_object_id)},\n {:object_id_null, false} <- {:object_id_null, Enum.all?(raw_object_id, &(&1 == 0))} do\n check_next_tree_entry(\n data,\n maybe_put_path(maybe_normalized_paths, path_segment, opts),\n path_segment,\n file_mode,\n opts\n )\n else\n {:file_mode, {:error, reason}} -> cover {:error, reason}\n {:file_mode, _} -> cover {:error, :invalid_file_mode}\n {:path_split, _} -> cover {:error, :truncated_in_name}\n {:path_valid, {:error, reason}} -> cover {:error, reason}\n {:duplicate, _} -> cover {:error, :duplicate_entry_names}\n {:sorted, _} -> cover {:error, :incorrectly_sorted}\n {:object_id_length, _} -> cover {:error, :truncated_in_object_id}\n {:object_id_null, _} -> cover {:error, :null_sha1}\n end\n end\n\n defp check_file_mode([], _mode), do: cover({:error, :invalid_mode})\n\n defp check_file_mode([?\\s | data], mode), do: cover({:ok, mode, data})\n\n defp check_file_mode([?0 | _data], 0), do: cover({:error, :invalid_mode})\n\n defp check_file_mode([c | data], mode) when c >= ?0 and c <= ?7,\n do: check_file_mode(data, mode * 8 + (c - ?0))\n\n defp check_file_mode([_c | _data], _mode), do: cover({:error, :invalid_mode})\n\n defp path_and_object_id(data), do: Enum.split_while(data, &(&1 != 0))\n\n defp maybe_mapset_member?(nil, _path_segment, _opts), do: cover(false)\n\n defp maybe_mapset_member?(mapset, path_segment, opts),\n do: MapSet.member?(mapset, normalize(path_segment, Keyword.get(opts, :macosx?, false)))\n\n defp duplicate_name?(this_name, data) do\n data = Enum.drop(data, 20)\n\n {mode_str, data} = Enum.split_while(data, &(&1 != ?\\s))\n mode = parse_octal(mode_str)\n\n data = Enum.drop(data, 1)\n\n {next_name, data} = Enum.split_while(data, &(&1 != 0))\n\n data = Enum.drop(data, 1)\n\n compare = FilePath.compare_same_name(this_name, next_name, mode)\n\n cond do\n Enum.empty?(mode_str) or Enum.empty?(next_name) -> cover false\n compare == :lt -> cover false\n compare == :eq -> cover true\n compare == :gt -> duplicate_name?(this_name, data)\n end\n end\n\n defp parse_octal(data) do\n 
case Integer.parse(to_string(data), 8) do\n {n, _} when is_integer(n) -> cover n\n :error -> cover 0\n end\n end\n\n defp correctly_sorted?([], _previous_mode, _this_name, _this_mode), do: cover(true)\n\n defp correctly_sorted?(previous_name, previous_mode, this_name, this_mode),\n do: FilePath.compare(previous_name, previous_mode, this_name, this_mode) != :gt\n\n defp maybe_put_path(nil, _path_segment, _opts), do: cover(nil)\n\n defp maybe_put_path(mapset, path_segment, opts),\n do: MapSet.put(mapset, normalize(path_segment, Keyword.get(opts, :macosx?, false)))\n\n # -- generic matching utilities --\n\n defp check_person_ident(data) do\n with {:missing_email, [?< | email_start]} <-\n {:missing_email, Enum.drop_while(data, &(&1 != ?<))},\n {:bad_email, [?> | after_email]} <-\n {:bad_email, Enum.drop_while(email_start, &(&1 != ?>))},\n {:missing_space_before_date, [?\\s | date]} <- {:missing_space_before_date, after_email},\n {:bad_date, {_date, [?\\s | tz]}} <-\n {:bad_date, ParseDecimal.from_decimal_charlist(date)},\n {:bad_timezone, {_tz, []}} <- {:bad_timezone, ParseDecimal.from_decimal_charlist(tz)} do\n cover :ok\n else\n {:missing_email, _} -> cover :missing_email\n {:bad_email, _} -> cover :bad_email\n {:missing_space_before_date, _} -> cover :missing_space_before_date\n {:bad_date, _} -> cover :bad_date\n {:bad_timezone, _} -> cover :bad_time_zone\n end\n end\n\n defp normalize(name, true = _mac?) when is_list(name) do\n name\n |> ParseCharlist.decode_ambiguous_charlist()\n |> String.downcase()\n |> :unicode.characters_to_nfc_binary()\n end\n\n defp normalize(name, _) when is_list(name), do: Enum.map(name, &to_lower/1)\n\n defp to_lower(b) when b >= ?A and b <= ?Z, do: cover(b + 32)\n defp to_lower(b), do: cover(b)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,2,null,null,7,null,null,null,null,null,null,null,null,null,null,169,null,null,null,null,null,null,null,null,null,null,null,null,1863,null,null,null,null,null,null,4,null,null,null,null,null,null,5,null,null,null,null,null,null,152,null,null,null,null,null,null,1576,null],"name":"lib/xgit/util/nb.ex","source":"# Copyright (C) 2008, 2015 Shawn O. Pearce \n# and other copyright owners as documented in the project's IP log.\n#\n# Elixir adaptation from jgit file:\n# org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java\n#\n# Copyright (C) 2019, Eric Scouten \n#\n# This program and the accompanying materials are made available\n# under the terms of the Eclipse Distribution License v1.0 which\n# accompanies this distribution, is reproduced below, and is\n# available at http://www.eclipse.org/org/documents/edl-v10.php\n#\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or\n# without modification, are permitted provided that the following\n# conditions are met:\n#\n# - Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#\n# - Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n#\n# - Neither the name of the Eclipse Foundation, Inc. 
nor the\n# names of its contributors may be used to endorse or promote\n# products derived from this software without specific prior\n# written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND\n# CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,\n# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\ndefmodule Xgit.Util.NB do\n @moduledoc false\n\n # Internal conversion utilities for network byte order handling.\n\n use Bitwise\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Parses a sequence of 4 bytes (network byte order) as a signed integer.\n\n Reads the first four bytes from `intbuf` and returns `{value, buf}`\n where value is the integer value from the first four bytes at `intbuf`\n and `buf` is the remainder of the byte array after those bytes.\n \"\"\"\n @spec decode_int32(intbuf :: [byte]) :: {integer, [byte]}\n def decode_int32(intbuf)\n\n def decode_int32([b1, b2, b3, b4 | tail]) when b1 >= 128,\n do: cover({b1 * 0x1000000 + b2 * 0x10000 + b3 * 0x100 + b4 - 0x100000000, tail})\n\n def decode_int32([b1, b2, b3, b4 | tail]),\n do: cover({b1 * 0x1000000 + b2 * 0x10000 + b3 * 0x100 + b4, tail})\n\n @doc ~S\"\"\"\n Parses a sequence of 2 bytes (network byte order) as an unsigned integer.\n\n Reads the first four bytes from `intbuf` and returns `{value, buf}`\n where value is the unsigned integer value from the first two bytes at `intbuf`\n and `buf` is the remainder of the byte array after those bytes.\n \"\"\"\n @spec decode_uint16(intbuf :: [byte]) :: {integer, [byte]}\n def decode_uint16(intbuf)\n def decode_uint16([b1, b2 | tail]), do: cover({b1 * 0x100 + b2, tail})\n\n @doc ~S\"\"\"\n Parses a sequence of 4 bytes (network byte order) as an unsigned integer.\n\n Reads the first four bytes from `intbuf` and returns `{value, buf}`\n where value is the unsigned integer value from the first four bytes at `intbuf`\n and `buf` is the remainder of the byte array after those bytes.\n \"\"\"\n @spec decode_uint32(intbuf :: [byte]) :: {integer, [byte]}\n def decode_uint32(intbuf)\n\n def decode_uint32([b1, b2, b3, b4 | tail]),\n do: cover({b1 * 0x1000000 + b2 * 0x10000 + b3 * 0x100 + b4, tail})\n\n @doc ~S\"\"\"\n Convert a 16-bit integer to a sequence of two bytes in network byte order.\n \"\"\"\n @spec encode_int16(v :: integer) :: [byte]\n def encode_int16(v) when is_integer(v) and v >= -32_768 and v <= 65_535,\n do: cover([v >>> 8 &&& 0xFF, v &&& 0xFF])\n\n @doc ~S\"\"\"\n Convert a 32-bit integer to a sequence of four bytes in network byte order.\n \"\"\"\n @spec encode_int32(v :: integer) :: [byte]\n def encode_int32(v) when is_integer(v) and v >= -2_147_483_647 and v <= 4_294_967_295,\n do: cover([v >>> 24 &&& 0xFF, v >>> 16 &&& 0xFF, v >>> 8 &&& 0xFF, v &&& 0xFF])\n\n @doc ~S\"\"\"\n Convert a 16-bit unsigned integer to a sequence of two bytes in network byte order.\n \"\"\"\n @spec encode_uint16(v :: non_neg_integer) :: [byte]\n 
def encode_uint16(v) when is_integer(v) and v >= 0 and v <= 65_535,\n do: cover([v >>> 8 &&& 0xFF, v &&& 0xFF])\n\n @doc ~S\"\"\"\n Convert a 32-bit unsigned integer to a sequence of four bytes in network byte order.\n \"\"\"\n @spec encode_uint32(v :: non_neg_integer) :: [byte]\n def encode_uint32(v) when is_integer(v) and v >= 0 and v <= 4_294_967_295,\n do: cover([v >>> 24 &&& 0xFF, v >>> 16 &&& 0xFF, v >>> 8 &&& 0xFF, v &&& 0xFF])\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,4,null,25,null,24,2,2,null,null,22,1,null,null,null,21,20,16,null,13,9,null,null,null,null,null,null,null,null,4,null,null,null,null,21,null,18,null,null,2,null,null,1,null,null,null,null,null,20,null,6,null,null,1,null,null,null,7,null,null,3,null,null,2,null,null,null,1,null,null,null,null,null,16,null,5,4,null,1,null,null,null,null,10,1,null,null,9,null,null,null,1,null,null,null,null,null,9,null,9,null,1,null,null,8,7,null,1,1,null,null,null,null,null,7,6,6,null,null,null,null,null,null,6,6,6,6,null,1,null,null,null,1,null,null,6,1,5,null,null,null,null,6,1,null,5,null,null,null,null,4,4,null,null,2,8,null],"name":"lib/xgit/repository.ex","source":"defmodule Xgit.Repository do\n @moduledoc ~S\"\"\"\n Represents a git repository.\n\n Create a repository by calling the `start_link` function on one of the modules\n that implements `Xgit.Repository.Storage`. The resulting PID can be used when\n calling functions in this module and `Xgit.Repository.Plumbing`.\n\n The functions implemented in this module correspond to the \"porcelain\" commands\n implemented by command-line git.\n\n (As of this writing, relatively few of the porcelain commands are implemented.)\n \"\"\"\n import Xgit.Util.ForceCoverage\n\n alias Xgit.Object\n alias Xgit.ObjectId\n alias Xgit.PersonIdent\n alias Xgit.Ref\n alias Xgit.Repository.Storage\n alias Xgit.Tag\n\n @typedoc ~S\"\"\"\n The process ID for an `Xgit.Repository` process.\n\n This is the same process ID returned from the `start_link` function of any\n module that implements `Xgit.Repository.Storage`.\n \"\"\"\n @type t :: pid\n\n @doc ~S\"\"\"\n Returns `true` if the argument is a PID representing a valid `Xgit.Repository` process.\n \"\"\"\n @spec valid?(repository :: term) :: boolean\n defdelegate valid?(repository), to: Storage\n\n ## -- Tags --\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `tag/4`.\n \"\"\"\n @type tag_reason :: Storage.put_ref_reason()\n\n @doc ~S\"\"\"\n Create a tag object.\n\n Analogous to the _create_ form of [`git tag`](https://git-scm.com/docs/git-tag).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `tag_name` (`String`) is the name to give to the new tag.\n\n `object` (`Xgit.ObjectId`) is the object ID to be pointed to by this tag\n (typically a `commit` object).\n\n ## Options\n\n `annotated?`: (boolean) true to create an annotated tag (default: `false` unless `message` is specified)\n\n `force?`: (boolean) true to replace an existing tag (default: `false`)\n\n `message`: (`String` or bytelist) message to associate with the tag.\n * Must be present and non-empty if `:annotated?` is `true`.\n * Implies 
`annotated?: true`.\n\n `tagger`: (`Xgit.PersonIdent`, required if annotated) tagger name, email, timestamp\n\n ## Return Value\n\n `:ok` if created successfully.\n\n `{:error, reason}` if unable. Reason codes may come from `Xgit.Repository.Storage.put_ref/3`.\n\n TO DO: Support GPG signatures. https://github.com/elixir-git/xgit/issues/202\n \"\"\"\n @spec tag(repository :: t, tag_name :: String.t(), object :: ObjectId.t(),\n annotated?: boolean,\n force?: boolean,\n message: [byte] | String.t(),\n tagger: PersonIdent.t()\n ) :: :ok | {:error, reason :: tag_reason}\n def tag(repository, tag_name, object, options \\\\ [])\n when is_pid(repository) and is_binary(tag_name) and is_binary(object) and is_list(options) do\n repository = Storage.assert_valid(repository)\n\n unless Tag.valid_name?(String.to_charlist(tag_name)) do\n raise ArgumentError,\n ~s(Xgit.Repository.tag/4: tag_name \"#{tag_name}\" is invalid)\n end\n\n unless ObjectId.valid?(object) do\n raise ArgumentError,\n \"Xgit.Repository.tag/4: object #{inspect(object)} is invalid\"\n end\n\n force? = force_from_tag_options(options)\n message = message_from_tag_options(options)\n annotated? = annotated_from_tag_options(options, message)\n\n if annotated? do\n create_annotated_tag(\n repository,\n tag_name,\n object,\n force?,\n message,\n tagger_from_tag_options(options)\n )\n else\n create_lightweight_tag(repository, tag_name, object, force?)\n end\n end\n\n defp force_from_tag_options(options) do\n case Keyword.get(options, :force?, false) do\n false ->\n cover false\n\n true ->\n cover true\n\n invalid ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: force? #{inspect(invalid)} is invalid\"\n end\n end\n\n defp message_from_tag_options(options) do\n case Keyword.get(options, :message) do\n nil ->\n cover nil\n\n \"\" ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: message must be non-empty if present\"\n\n message when is_binary(message) ->\n String.to_charlist(message)\n\n [_ | _] = message ->\n cover message\n\n [] ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: message must be non-empty if present\"\n\n invalid ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: message #{inspect(invalid)} is invalid\"\n end\n end\n\n defp annotated_from_tag_options(options, message) do\n case Keyword.get(options, :annotated?, message != nil) do\n false ->\n if message == nil do\n cover false\n else\n raise ArgumentError,\n \"Xgit.Repository.tag/4: annotated?: false can not be specified when message is present\"\n end\n\n true ->\n if message == nil do\n raise ArgumentError,\n \"Xgit.Repository.tag/4: annotated?: true can not be specified without message\"\n else\n cover true\n end\n\n invalid ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: annotated? 
#{inspect(invalid)} is invalid\"\n end\n end\n\n defp tagger_from_tag_options(options) do\n tagger = Keyword.get(options, :tagger)\n\n cond do\n tagger == nil ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: tagger must be specified for an annotated tag\"\n\n PersonIdent.valid?(tagger) ->\n cover tagger\n\n true ->\n raise ArgumentError,\n \"Xgit.Repository.tag/4: tagger #{inspect(tagger)} is invalid\"\n end\n end\n\n defp create_annotated_tag(repository, tag_name, object, force?, message, tagger) do\n with :ok <- check_existing_ref(repository, tag_name, force?),\n {:ok, %Object{type: target_type}} <- Storage.get_object(repository, object),\n tag <- %Tag{\n object: object,\n type: target_type,\n name: String.to_charlist(tag_name),\n tagger: tagger,\n message: ensure_trailing_newline(message)\n },\n %Object{id: tag_id} = tag_object <- Tag.to_object(tag),\n :ok <- Storage.put_loose_object(repository, tag_object) do\n ref = %Ref{name: \"refs/tags/#{tag_name}\", target: tag_id}\n Storage.put_ref(repository, ref, opts_for_force(force?))\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp check_existing_ref(_repository, _tag_name, true), do: cover(:ok)\n\n defp check_existing_ref(repository, tag_name, false) do\n case Storage.get_ref(repository, \"refs/tags/#{tag_name}\") do\n {:ok, %Ref{}} -> cover {:error, :old_target_not_matched}\n {:error, :not_found} -> cover :ok\n end\n end\n\n defp ensure_trailing_newline(message) do\n if List.last(message) == 10 do\n cover(message)\n else\n cover(message ++ '\\n')\n end\n end\n\n defp create_lightweight_tag(repository, tag_name, object, force?) do\n ref = %Ref{name: \"refs/tags/#{tag_name}\", target: object}\n Storage.put_ref(repository, ref, opts_for_force(force?))\n end\n\n defp opts_for_force(true), do: cover(follow_link?: false)\n defp opts_for_force(false), do: cover(follow_link?: false, old_target: 
:new)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,643,null,null,null,643,640,2,null,null,null,null,null,null,null,null,1463,1463,null,null,1,null,1,null,null,null,null,null,null,null,1,null,null,1338,1322,null,16,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,51,null,null,null,116,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,528,null,null,null,null,null,null,4,null,null,null,null,null,null,null,null,null,null,null,null,null,19,null,null,null,null,null,null,19,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,185,null,null,null,null,null,48,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,224,null,null,null,null,null,213,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,87,null,null,null,null,null,87,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,44,null,null,null,99,97,null,2,null,null,null,null,null,null,58,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,8,null,null,null,28,25,null,3,null,null,null,null,null,null,null,16,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,51,null,null,null,66,64,null,2,null,null,null,null,null,null,null,55,null,null,11,55,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,74,74,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,49,null,65,63,null,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,6,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1447,null,null,116,null,null,527,526,null,1,null,null,null,null,1,null,null,19,null,null,185,null,null,224,null,null,87,null,null,97,null,null,25,null,null,64,null,null,74,null,null,63,null,null,6,null,null,1,1,null,null,null,844,377,376,91,null,null,null,null,19,19,null,null,null,null,null,null,null,null,null,null],"name":"lib/xgit/repository/storage.ex","source":"defmodule Xgit.Repository.Storage do\n @moduledoc ~S\"\"\"\n Represents the persistent storage for a git repository.\n\n Unless you are implementing an alternative storage architecture or implementing\n plumbing-level commands, this module is probably not of interest to you.\n\n ## Design Goals\n\n Xgit intends to allow repositories to be stored in multiple different mechanisms.\n While it includes built-in support for local on-disk repositories\n (see `Xgit.Repository.OnDisk`), and in-member repositories (see `Xgit.Repository.InMemory`),\n you could envision repositories stored entirely on a remote file system or database.\n\n ## Implementing a Storage Architecture\n\n To define a new mechanism for storing a git repo, create a new module that `use`s\n this module and implements the required callbacks. Consider the information stored\n in a typical `.git` directory in a local repository. You will be building an\n alternative to that storage mechanism.\n \"\"\"\n use GenServer\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.ConfigEntry\n alias Xgit.Object\n alias Xgit.ObjectId\n alias Xgit.Ref\n alias Xgit.Repository.InvalidRepositoryError\n alias Xgit.Repository.WorkingTree\n\n require Logger\n\n @typedoc ~S\"\"\"\n The process ID for an `Xgit.Repository.Storage` process.\n \"\"\"\n @type t :: pid | {:xgit_repo, pid}\n\n @doc \"\"\"\n Starts an `Xgit.Repository.Storage` process linked to the current process.\n\n _IMPORTANT:_ You should not invoke this function directly unless you are\n implementing a new storage implementation module that implements this behaviour.\n\n ## Parameters\n\n `module` is the name of a module that implements the callbacks defined in this module.\n\n `init_arg` is passed to the `init/1` function of `module`.\n\n `options` are passed to `GenServer.start_link/3`.\n\n ## Return Value\n\n See `GenServer.start_link/3`.\n \"\"\"\n @spec start_link(module :: module, init_arg :: term, GenServer.options()) ::\n GenServer.on_start()\n def start_link(module, init_arg, options) when is_atom(module) and is_list(options),\n do: GenServer.start_link(__MODULE__, {module, init_arg}, options)\n\n @impl true\n def init({mod, mod_init_arg}) do\n case mod.init(mod_init_arg) do\n {:ok, mod_state} -> cover {:ok, %{mod: mod, mod_state: mod_state, working_tree: nil}}\n {:stop, reason} -> cover {:stop, reason}\n end\n end\n\n @doc ~S\"\"\"\n Returns `true` if the argument is a PID representing a valid `Xgit.Repository.Storage` process.\n \"\"\"\n @spec valid?(repository :: term) :: boolean\n def valid?(repository) when is_pid(repository) do\n Process.alive?(repository) &&\n GenServer.call(repository, :valid_repository?) 
== :valid_repository\n end\n\n def valid?({:xgit_repo, repository}) when is_pid(repository), do: cover(true)\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Raises `Xgit.Repository.InvalidRepositoryError` if the value provided is anything\n other than the process ID for a valid `Xgit.Repository.Storage` process.\n \"\"\"\n @spec assert_valid(repository :: t) :: t | no_return\n def assert_valid({:xgit_repo, repository} = checked_repo) when is_pid(repository),\n do: cover(checked_repo)\n\n def assert_valid(repository) do\n if is_pid(repository) && valid?(repository) do\n cover {:xgit_repo, repository}\n else\n raise InvalidRepositoryError\n end\n end\n\n ## --- Working Tree ---\n\n @doc ~S\"\"\"\n Get the default working tree if one has been attached.\n\n Other working trees may also be attached to this repository, but do not have\n special status with regard to the repository.\n \"\"\"\n @spec default_working_tree(repository :: t) :: WorkingTree.t() | nil\n def default_working_tree(repository) when is_pid(repository) do\n repository\n |> assert_valid()\n |> default_working_tree()\n end\n\n def default_working_tree({:xgit_repo, repository}) when is_pid(repository) do\n GenServer.call(repository, :default_working_tree)\n end\n\n @doc ~S\"\"\"\n Attach a working tree to this repository as the default working tree.\n\n Future plumbing and API commands that target this repository will use this\n working tree unless otherwise dictated.\n\n ## Return Value\n\n `:ok` if the working tree was successfully attached.\n\n `:error` if a working tree was already attached or the proposed working tree\n was not valid.\n \"\"\"\n @spec set_default_working_tree(repository :: t, working_tree :: WorkingTree.t()) :: :ok | :error\n def set_default_working_tree({:xgit_repo, repository}, working_tree)\n when is_pid(repository) and is_pid(working_tree) do\n GenServer.call(repository, {:set_default_working_tree, working_tree})\n end\n\n def set_default_working_tree(repository, working_tree)\n when is_pid(repository) and is_pid(working_tree) do\n repository\n |> assert_valid()\n |> set_default_working_tree(working_tree)\n end\n\n ## --- Objects ---\n\n @doc ~S\"\"\"\n Returns `true` if all objects in the list are present in the object dictionary.\n\n This limit is not enforced, but it's recommended to query for no more than ~100 object\n IDs at a time.\n \"\"\"\n @spec has_all_object_ids?(repository :: t, object_ids :: [ObjectId.t()]) :: boolean\n def has_all_object_ids?({:xgit_repo, repository}, object_ids)\n when is_pid(repository) and is_list(object_ids) do\n GenServer.call(repository, {:has_all_object_ids?, object_ids})\n end\n\n def has_all_object_ids?(repository, object_ids)\n when is_pid(repository) and is_list(object_ids) do\n repository\n |> assert_valid()\n |> has_all_object_ids?(object_ids)\n end\n\n @doc ~S\"\"\"\n Checks for presence of multiple object Ids.\n\n Called when `has_all_object_ids?/2` is called.\n\n ## Return Value\n\n Should return `{:ok, has_all_object_ids?, state}` where `has_all_object_ids?` is `true`\n if all object IDs can be found in the object dictionary; `false` otherwise.\n \"\"\"\n @callback handle_has_all_object_ids?(state :: any, object_ids :: [ObjectId.t()]) ::\n {:ok, has_all_object_ids? 
:: boolean, state :: any}\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `get_object/2`.\n \"\"\"\n @type get_object_reason :: :not_found | :invalid_object\n\n @doc ~S\"\"\"\n Retrieves an object from the repository.\n\n ## Return Value\n\n `{:ok, object}` if the object exists in the database.\n\n `{:error, :not_found}` if the object does not exist in the database.\n\n `{:error, :invalid_object}` if object was found, but invalid.\n \"\"\"\n @spec get_object(repository :: t, object_id :: ObjectId.t()) ::\n {:ok, object :: Object.t()} | {:error, reason :: get_object_reason}\n def get_object({:xgit_repo, repository}, object_id)\n when is_pid(repository) and is_binary(object_id) do\n GenServer.call(repository, {:get_object, object_id})\n end\n\n def get_object(repository, object_id) when is_pid(repository) and is_binary(object_id) do\n repository\n |> assert_valid()\n |> get_object(object_id)\n end\n\n @doc ~S\"\"\"\n Retrieves an object from the repository.\n\n Called when `get_object/2` is called.\n\n ## Return Value\n\n Should return `{:ok, object, state}` if read successfully.\n\n Should return `{:error, :not_found, state}` if unable to find the object.\n\n Should return `{:error, :invalid_object, state}` if object was found, but invalid.\n \"\"\"\n @callback handle_get_object(state :: any, object_id :: ObjectId.t()) ::\n {:ok, object :: Object.t(), state :: any}\n | {:error, reason :: get_object_reason, state :: any}\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `put_loose_object/2`.\n \"\"\"\n @type put_loose_object_reason :: :cant_create_file | :object_exists\n\n @doc ~S\"\"\"\n Writes a loose object to the repository.\n\n ## Return Value\n\n `:ok` if written successfully.\n\n `{:error, :cant_create_file}` if unable to create the storage for the loose object.\n\n `{:error, :object_exists}` if the object already exists in the database.\n \"\"\"\n @spec put_loose_object(repository :: t, object :: Object.t()) ::\n :ok | {:error, reason :: put_loose_object_reason}\n def put_loose_object({:xgit_repo, repository}, %Object{} = object) when is_pid(repository) do\n GenServer.call(repository, {:put_loose_object, object})\n end\n\n def put_loose_object(repository, %Object{} = object) when is_pid(repository) do\n repository\n |> assert_valid()\n |> put_loose_object(object)\n end\n\n @doc ~S\"\"\"\n Writes a loose object to the repository.\n\n Called when `put_loose_object/2` is called.\n\n ## Return Value\n\n Should return `{:ok, state}` if written successfully.\n\n Should return `{:error, :cant_create_file}` if unable to create the storage for\n the loose object.\n\n Should return `{:error, :object_exists}` if the object already exists in the database.\n \"\"\"\n @callback handle_put_loose_object(state :: any, object :: Object.t()) ::\n {:ok, state :: any} | {:error, reason :: put_loose_object_reason, state :: any}\n\n ## --- References ---\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `list_refs/1`.\n \"\"\"\n @type list_refs_reason :: File.posix()\n\n @doc ~S\"\"\"\n Lists all references in the repository.\n\n ## Return Value\n\n `{:ok, refs}` if successful. `refs` will be a list of `Xgit.Ref` structs.\n The sequence of the list is unspecified.\n\n `{:error, reason}` if unable. 
See `list_refs_reason`.\n \"\"\"\n @spec list_refs(repository :: t) ::\n {:ok, refs :: [Ref.t()]} | {:error, reason :: list_refs_reason}\n def list_refs({:xgit_repo, repository}) when is_pid(repository) do\n GenServer.call(repository, :list_refs)\n end\n\n def list_refs(repository) when is_pid(repository) do\n repository\n |> assert_valid()\n |> list_refs()\n end\n\n @doc ~S\"\"\"\n Lists all references in the repository.\n\n Called when `list_refs/1` is called.\n\n ## Return Value\n\n Should return `{:ok, refs, state}` if read successfully. `refs` should be a list\n of `Xgit.Ref` structs.\n\n Should return `{:error, reason}` if unable. Currently only `File.posix` reasons\n are expected.\n \"\"\"\n @callback handle_list_refs(state :: any) ::\n {:ok, refs :: [Ref], state :: any}\n | {:error, reason :: list_refs_reason, state :: any}\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `put_ref/3`.\n \"\"\"\n @type put_ref_reason ::\n :invalid_ref\n | :cant_create_file\n | :target_not_found\n | :old_target_not_matched\n\n @doc ~S\"\"\"\n Writes or updates a reference in the repository.\n\n If any existing reference exists with this name, it will be replaced.\n\n ## Options\n\n `follow_link?`: (default: `true`) `true` to follow symbolic refs\n\n `old_target`: If present, a ref with this name must already exist and the `target`\n value must match the object ID provided in this option. (There is a special value `:new`\n which instead requires that the named ref must **not** exist.)\n\n ## TO DO\n\n Support for ref log. https://github.com/elixir-git/xgit/issues/224\n\n Support for `--no-deref` option. https://github.com/elixir-git/xgit/issues/226\n\n ## Return Value\n\n `:ok` if written successfully.\n\n `{:error, :invalid_ref}` if the `Xgit.Ref` structure is invalid.\n\n `{:error, :cant_create_file}` if unable to create the storage for the reference.\n\n `{:error, :target_not_found}` if the target object does not exist in the repository.\n\n `{:error, :old_target_not_matched}` if `old_target` was specified and the target ref points\n to a different object ID.\n \"\"\"\n @spec put_ref(repository :: t, ref :: Ref.t(), follow_link?: boolean, old_target: ObjectId.t()) ::\n :ok | {:error, reason :: put_ref_reason}\n def put_ref(repository, ref, opts \\\\ [])\n\n def put_ref({:xgit_repo, repository}, %Ref{} = ref, opts)\n when is_pid(repository) and is_list(opts) do\n if Ref.valid?(ref) do\n GenServer.call(repository, {:put_ref, ref, opts})\n else\n cover {:error, :invalid_ref}\n end\n end\n\n def put_ref(repository, ref, opts) when is_pid(repository) and is_list(opts) do\n repository\n |> assert_valid()\n |> put_ref(ref, opts)\n end\n\n @doc ~S\"\"\"\n Writes or updates a reference in the repository.\n\n Called when `put_ref/3` is called.\n\n The implementation must validate that the referenced object exists and is of\n type `commit`. It does not need to validate that the reference is otherwise\n valid.\n\n ## Options\n\n `follow_link?`: (default: `true`) `true` to follow symbolic refs\n\n `old_target`: If present, a ref with this name must already exist and the `target`\n value must match the object ID provided in this option. 
(There is a special value `:new`\n which instead requires that the named ref must **not** exist.)\n\n ## Return Value\n\n Should return `{:ok, state}` if written successfully.\n\n Should return `{:error, :cant_create_file}` if unable to create the storage for\n the ref.\n\n Should return `{:error, :target_not_found}` if the target object does not\n exist in the repository.\n\n Should return `{:error, :old_target_not_matched}` if `old_target` was specified and the\n target ref points to a different object ID.\n \"\"\"\n @callback handle_put_ref(state :: any, ref :: Ref.t(),\n follow_link?: boolean,\n old_target: ObjectId.t()\n ) ::\n {:ok, state :: any} | {:error, reason :: put_ref_reason, state :: any}\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `delete_ref/3`.\n \"\"\"\n @type delete_ref_reason :: :invalid_ref | :cant_delete_file | :old_target_not_matched\n\n @doc ~S\"\"\"\n Deletes a reference from the repository.\n\n ## Options\n\n `follow_link?`: (default: `true`) `true` to follow symbolic refs\n\n `old_target`: If present, a ref with this name must already exist and the `target`\n value must match the object ID provided in this option.\n\n ## TO DO\n\n Support for ref log. https://github.com/elixir-git/xgit/issues/224\n\n Support for `--no-deref` option. https://github.com/elixir-git/xgit/issues/226\n\n ## Return Value\n\n `:ok` if deleted successfully or the reference did not exist.\n\n `{:error, :invalid_ref}` if `name` is not a valid ref name.\n\n `{:error, :cant_delete_file}` if unable to delete the storage for the reference.\n\n `{:error, :old_target_not_matched}` if `old_target` was specified and the target ref points\n to a different object ID or did not exist.\n \"\"\"\n @spec delete_ref(repository :: t, name :: Ref.name(),\n follow_link?: boolean,\n old_target: ObjectId.t()\n ) ::\n :ok | {:error, reason :: delete_ref_reason}\n def delete_ref(repository, name, opts \\\\ [])\n\n def delete_ref({:xgit_repo, repository}, name, opts)\n when is_pid(repository) and is_binary(name) and is_list(opts) do\n if Ref.valid_name?(name) do\n GenServer.call(repository, {:delete_ref, name, opts})\n else\n cover {:error, :invalid_ref}\n end\n end\n\n def delete_ref(repository, name, opts)\n when is_pid(repository) and is_binary(name) and is_list(opts) do\n repository\n |> assert_valid()\n |> delete_ref(name, opts)\n end\n\n @doc ~S\"\"\"\n Deletes a reference in the repository.\n\n Called when `delete_ref/3` is called.\n\n ## Options\n\n `follow_link?`: `true` to follow symbolic refs\n\n `old_target`: If present, a ref with this name must already exist and the `target`\n value must match the object ID provided in this option.\n\n ## Return Value\n\n Should return `{:ok, state}` if deleted successfully or the ref did not exist.\n\n Should return `{:error, :cant_delete_file}` if unable to delete the storage for\n the ref.\n\n Should return `{:error, :old_target_not_matched}` if `old_target` was specified and the\n target ref points to a different object ID or the ref did not exist.\n \"\"\"\n @callback handle_delete_ref(state :: any, name :: Ref.name(),\n follow_link?: boolean,\n old_target: ObjectId.t()\n ) ::\n {:ok, state :: any} | {:error, reason :: delete_ref_reason, state :: any}\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `get_ref/2`.\n \"\"\"\n @type get_ref_reason :: File.posix() | :invalid_name | :not_found\n\n @doc ~S\"\"\"\n Reads a reference from the repository.\n\n If any existing reference exists with this name, it will be returned.\n\n ## 
Parameters\n\n `name` is the name of the reference to be found. It must be a valid name\n as per `Xgit.Ref.valid_name?/1`.\n\n ## Options\n\n `follow_link?`: (default: `true`) `true` to follow symbolic refs\n\n ## TO DO\n\n Dereference tags? https://github.com/elixir-git/xgit/issues/228\n\n ## Return Value\n\n `{:ok, ref}` if the reference was found successfully. `ref` will be an\n `Xgit.Ref` struct.\n\n `{:error, :invalid_name}` if `name` is not a valid ref name.\n\n `{:error, :not_found}` if no such reference exists.\n \"\"\"\n @spec get_ref(repository :: t, name :: String.t(), follow_link?: boolean) ::\n {:ok, ref :: Ref.t()} | {:error, reason :: get_ref_reason}\n def get_ref(repository, name, opts \\\\ [])\n\n def get_ref({:xgit_repo, repository}, name, opts)\n when is_pid(repository) and is_binary(name) and is_list(opts) do\n if valid_ref_name?(name) do\n GenServer.call(repository, {:get_ref, name, opts})\n else\n cover {:error, :invalid_name}\n end\n end\n\n def get_ref(repository, name, opts)\n when is_pid(repository) and is_binary(name) and is_list(opts) do\n repository\n |> assert_valid()\n |> get_ref(name, opts)\n end\n\n defp valid_ref_name?(\"HEAD\"), do: cover(true)\n defp valid_ref_name?(name), do: Ref.valid_name?(name)\n\n @doc ~S\"\"\"\n Reads a reference from the repository.\n\n Called when `get_ref/3` is called.\n\n ## Options\n\n `follow_link?`: (default: `true`) `true` to follow symbolic refs\n\n ## Return Value\n\n Should return `{:ok, ref, state}` if the reference was found successfully.\n `ref` must be an `Xgit.Ref` struct.\n\n Should return `{:error, :not_found, state}` if no such reference exists.\n \"\"\"\n @callback handle_get_ref(state :: any, name :: String.t(), follow_link?: boolean) ::\n {:ok, ref :: Xgit.Ref.t(), state :: any}\n | {:error, reason :: get_ref_reason, state :: any}\n\n # TO DO: Add a `pack_refs` function. 
https://github.com/elixir-git/xgit/issues/223\n\n ## --- Config ---\n\n @doc ~S\"\"\"\n Return any configuration entries that match the requested search.\n\n The entries are not necessarily sorted; the order in which they are returned is up to\n the underlying storage mechanism.\n\n ## Options\n\n * `section:` (`String`) if provided, only returns entries in the named section\n * `subsection:` (`String`) if provided, only returns entries in the named subsection\n * `name:` (`String`) if provided, only returns entries with the given variable name\n\n If no options are provided, returns all entries.\n\n ## Return Values\n\n A list of `Xgit.ConfigEntry` structs that match the search parameters.\n \"\"\"\n @spec get_config_entries(repository :: t,\n section: String.t(),\n subsection: String.t(),\n name: String.t()\n ) :: [Xgit.ConfigEntry.t()]\n def get_config_entries(repository, opts \\\\ []) when is_pid(repository) and is_list(opts) do\n {:ok, entries} = GenServer.call(repository, {:get_config_entries, opts})\n entries\n end\n\n @doc ~S\"\"\"\n Return any configuration entries that match the requested search.\n\n Called when `get_config_entries/2` is called.\n\n The entries need not be sorted.\n\n ## Options\n\n * `section:` (`String`) if provided, only returns entries in the named section\n * `subsection:` (`String`) if provided, only returns entries in the named subsection\n * `name:` (`String`) if provided, only returns entries with the given variable name\n\n If no options are provided, returns all entries.\n\n ## Return Value\n\n Should return `{:ok, entries, state}` where `entries` is a list of `Xgit.ConfigEntry`\n structs that match the search parameters.\n \"\"\"\n @callback handle_get_config_entries(state :: any,\n section: String.t(),\n subsection: String.t(),\n name: String.t()\n ) ::\n {:ok, entries :: [Xgit.ConfigEntry.t()], state :: any}\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `add_config_entry/3`.\n \"\"\"\n @type add_config_entry_reason :: File.posix()\n\n @doc ~S\"\"\"\n Add an entry to an existing config.\n\n ## Parameters\n\n `entry` (`Xgit.ConfigEntry`) entry to be added\n\n ## Options\n\n `add?`: if `true`, adds this entry to any that may already exist\n `replace_all?`: if `true`, removes all existing entries that match any keys provided\n before adding the existing one\n\n ## Return Values\n\n `:ok` if successful.\n\n `{:error, TBD}` if unable.\n \"\"\"\n @spec add_config_entry(repository :: t, entry :: Xgit.ConfigEntry.t(),\n add?: boolean,\n replace_all?: boolean\n ) ::\n :ok | {:error, reason :: add_config_entry_reason}\n def add_config_entry(repository, %ConfigEntry{} = entry, opts \\\\ [])\n when is_pid(repository) and is_list(opts) do\n if ConfigEntry.valid?(entry) do\n GenServer.call(repository, {:add_config_entry, entry, opts})\n else\n raise ArgumentError,\n \"Xgit.Repository.Storage.add_config_entry/3: entry is invalid\"\n end\n end\n\n @doc ~S\"\"\"\n Add a new entry to an existing config.\n\n Called when `add_config_entry/3` is called.\n\n ## Parameters\n\n `entry` (`Xgit.ConfigEntry`) entry to be added\n\n ## Options\n\n `add?`: if `true`, adds this entry to any that may already exist\n `replace_all?`: if `true`, removes all existing entries that match any keys provided\n before adding this one\n\n ## Return Value\n\n Should return `{:ok, state}` if successful.\n\n Should return `{:error, reason, state}` if unable to complete the update.\n \"\"\"\n @callback handle_add_config_entry(\n state :: any,\n entry :: Xgit.ConfigEntry.t(),\n add?: 
boolean,\n replace_all?: boolean\n ) ::\n {:ok, state :: any}\n | {:error, reason :: add_config_entry_reason, state :: any}\n\n @doc ~S\"\"\"\n Remove any configuration entries that match the requested search.\n\n ## Options\n\n * `section:` (`String`) if provided, only removes entries in the named section\n * `subsection:` (`String`) if provided, only removes entries in the named subsection\n * `name:` (`String`) if provided, only removes entries with the given variable name\n\n **WARNING:** If no options are provided, removes all entries.\n\n ## Return Values\n\n `:ok` regardless of whether any matching items were found to remove.\n \"\"\"\n @spec remove_config_entries(repository :: t,\n section: String.t(),\n subsection: String.t(),\n name: String.t()\n ) :: :ok\n def remove_config_entries(repository, opts \\\\ []) when is_pid(repository) and is_list(opts) do\n GenServer.call(repository, {:remove_config_entries, opts})\n end\n\n @doc ~S\"\"\"\n Remove any configuration entries that match the requested search.\n\n Called when `remove_config_entries/2` is called.\n\n ## Options\n\n * `section:` (`String`) if provided, only removes entries in the named section\n * `subsection:` (`String`) if provided, only removes entries in the named subsection\n * `name:` (`String`) if provided, only removes entries with the given variable name\n\n If no options are provided, removes all entries.\n\n ## Return Value\n\n Should return `{:ok, state}` if successful. (This _could_ mean no matching items were\n found to remove.)\n \"\"\"\n @callback handle_remove_config_entries(state :: any,\n section: String.t(),\n subsection: String.t(),\n name: String.t()\n ) :: {:ok, state :: any}\n\n ## --- Callbacks ---\n\n @impl true\n def handle_call(:valid_repository?, _from, state), do: {:reply, :valid_repository, state}\n\n def handle_call(:default_working_tree, _from, %{working_tree: working_tree} = state),\n do: {:reply, working_tree, state}\n\n def handle_call({:set_default_working_tree, working_tree}, _from, %{working_tree: nil} = state) do\n if WorkingTree.valid?(working_tree) do\n {:reply, :ok, %{state | working_tree: working_tree}}\n else\n {:reply, :error, state}\n end\n end\n\n def handle_call({:set_default_working_tree, _working_tree}, _from, state),\n do: {:reply, :error, state}\n\n def handle_call({:has_all_object_ids?, object_ids}, _from, state),\n do: delegate_boolean_call_to(state, :handle_has_all_object_ids?, [object_ids])\n\n def handle_call({:get_object, object_id}, _from, state),\n do: delegate_call_to(state, :handle_get_object, [object_id])\n\n def handle_call({:put_loose_object, %Object{} = object}, _from, state),\n do: delegate_call_to(state, :handle_put_loose_object, [object])\n\n def handle_call(:list_refs, _from, state),\n do: delegate_call_to(state, :handle_list_refs, [])\n\n def handle_call({:put_ref, %Ref{} = ref, opts}, _from, state),\n do: delegate_call_to(state, :handle_put_ref, [ref, opts])\n\n def handle_call({:delete_ref, name, opts}, _from, state),\n do: delegate_call_to(state, :handle_delete_ref, [name, opts])\n\n def handle_call({:get_ref, name, opts}, _from, state),\n do: delegate_call_to(state, :handle_get_ref, [name, opts])\n\n def handle_call({:get_config_entries, opts}, _from, state),\n do: delegate_call_to(state, :handle_get_config_entries, [opts])\n\n def handle_call({:add_config_entry, entry, opts}, _from, state),\n do: delegate_call_to(state, :handle_add_config_entry, [entry, opts])\n\n def handle_call({:remove_config_entries, opts}, _from, state),\n do: 
delegate_call_to(state, :handle_remove_config_entries, [opts])\n\n def handle_call(message, _from, state) do\n Logger.warn(\"Repository received unrecognized call #{inspect(message)}\")\n {:reply, {:error, :unknown_message}, state}\n end\n\n defp delegate_call_to(%{mod: mod, mod_state: mod_state} = state, function, args) do\n case apply(mod, function, [mod_state | args]) do\n {:ok, mod_state} -> {:reply, :ok, %{state | mod_state: mod_state}}\n {:ok, response, mod_state} -> {:reply, {:ok, response}, %{state | mod_state: mod_state}}\n {:error, reason, mod_state} -> {:reply, {:error, reason}, %{state | mod_state: mod_state}}\n end\n end\n\n defp delegate_boolean_call_to(state, function, args) do\n {:reply, {:ok, response}, state} = delegate_call_to(state, function, args)\n cover {:reply, response, state}\n end\n\n defmacro __using__(opts) do\n quote location: :keep, bind_quoted: [opts: opts] do\n use GenServer, opts\n alias Xgit.Repository.Storage\n @behaviour Storage\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"name":"lib/xgit/util/shared_test_case.ex","source":"defmodule Xgit.Util.SharedTestCase do\n @moduledoc false\n\n # Code to encourage sharing of test cases.\n # Adapted from https://blog.codeminer42.com/how-to-test-shared-behavior-in-elixir-3ea3ebb92b64/.\n\n defmacro define_shared_tests(do: block) do\n quote do\n defmacro __using__(options) do\n block = unquote(Macro.escape(block))\n\n async? = Keyword.get(options, :async, false)\n options_without_async = Keyword.delete(options, :async)\n\n quote do\n use ExUnit.Case, async: unquote(async?)\n\n @moduletag unquote(options_without_async)\n unquote(block)\n end\n end\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,133,null,null,null,null,null,null,null,null,128,null,null,null,133,null,133,1,null,null,null,132,null,132,1,null,null,null,131,null,131,1,null,null,null,130,null,130,1,null,null,null,129,1,null,null,null,128,null,null,null,null,null,null,null,null,null,null,128,null,null,null,128,null,null,null,null,null,2,1,1,null,null,null,null,1,null,null,null,null,null,null,128,null,5,null,null,122,121,1,null,null,null,1,null,126,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,6,null,5,4,null,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,8,null,7,6,4,null,2,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,39,null,38,37,35,null,2,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,20,null,19,null,5,5,5,5,null,14,null,null,null,null,19,16,11,9,7,null,5,null,14,null,null,null,null,19,18,17,16,null,3,null,null,null,2,null,null,13,9,null,4,null,null,null,1,null,null,15,13,11,null,4,null,null,null,null,10,9,null,1,null,null,null,1,null,null,16,12,null,4,null,null,null,null,5,null,null,null,null,null,null,null,null,null,5,1,null,4,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,30,null,29,28,26,null,2,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,6,4,null,3,null,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,32,null,34,32,31,null,30,null,1,1,1,null,null,null,null,32,31,null,1,null,null,null,null,null,27,null,1,null,null,27,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,31,30,null,1,null,null,null,2,1,null,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,4,null,11,10,9,1,null,8,null,null,1,null,null,null,null,10,null,10,1,null,null,null,9,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,18,15,13,null,1,null,null,null,null,15,null,15,1,null,null,null,14,null,14,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,27,null,35,34,null,33,8,null,25,null,null,null,null,34,25,8,null,null,null,null,25,null,null,null,2,null,null,null,7,6,null,1,null,null,null,null,null,25,null,24,23,null,1,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,6,null,5,null,2,null,null,1,null,null,1,null,null,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,10,null,9,null,8,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,5,null,null,null,null,null,null,null,69,null,65,61,4,null,null,null,null,null,null,null,null,null,null,65,null,null],"name":"lib/xgit/repository/plumbing.ex","source":"defmodule Xgit.Repository.Plumbing do\n @moduledoc ~S\"\"\"\n Implements the \"plumbing\"-level commands for a git repository.\n\n The functions in this module, like the \"plumbing\" commands in command-line\n git, are 
typically not of interest to an end-user developer. Instead, these\n are the raw building-block operations that are often composed together to\n make the user-targeted \"porcelain\" commands.\n\n Most of the functions in this module expect a `repository` argument, which\n should be the process ID (PID) for a process that implements the `Xgit.Repository.Storage`\n behaviour. It's not stated for each individual function, but if `repository`\n is some other value, the error `Xgit.Repository.InvalidRepositoryError` is\n raised.\n \"\"\"\n use Xgit.FileMode\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.Commit\n alias Xgit.ContentSource\n alias Xgit.DirCache\n alias Xgit.DirCache.Entry, as: DirCacheEntry\n alias Xgit.FilePath\n alias Xgit.Object\n alias Xgit.ObjectId\n alias Xgit.ObjectType\n alias Xgit.PersonIdent\n alias Xgit.Ref\n alias Xgit.Repository.Storage\n alias Xgit.Repository.WorkingTree\n alias Xgit.Tag\n alias Xgit.Tree\n\n ## --- Objects ---\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `hash_object/2`.\n \"\"\"\n @type hash_object_reason ::\n Object.check_reason()\n | FilePath.check_path_reason()\n | FilePath.check_path_segment_reason()\n | Storage.put_loose_object_reason()\n\n @doc ~S\"\"\"\n Computes an object ID and optionally writes that into the repository's object store.\n\n Analogous to [`git hash-object`](https://git-scm.com/docs/git-hash-object).\n\n ## Parameters\n\n `content` describes how this function should obtain the content.\n (See `Xgit.ContentSource`.)\n\n ## Options\n\n `:type`: the object's type\n * Type: `Xgit.ObjectType`\n * Default: `:blob`\n * See [`-t` option on `git hash-object`](https://git-scm.com/docs/git-hash-object#Documentation/git-hash-object.txt--tlttypegt).\n\n `:validate?`: `true` to verify that the object is valid for `:type`\n * Type: boolean\n * Default: `true`\n * This is the inverse of the [`--literally` option on `git hash-object`](https://git-scm.com/docs/git-hash-object#Documentation/git-hash-object.txt---literally).\n\n `:repo`: where the content should be stored\n * Type: `Xgit.Repository.Storage` (PID)\n * Default: `nil`\n\n `:write?`: `true` to write the object into the repository\n * Type: boolean\n * Default: `false`\n * This option is meaningless if `:repo` is not specified.\n * See [`-w` option on `git hash-object`](https://git-scm.com/docs/git-hash-object#Documentation/git-hash-object.txt--w).\n\n _TO DO:_ There is no support, at present, for filters as defined in a\n `.gitattributes` file. See [issue #18](https://github.com/elixir-git/xgit/issues/18).\n\n ## Return Values\n\n `{:ok, object_id}` if the object could be validated and assigned an ID.\n\n `{:error, :reason}` if unable. 
The relevant reason codes may come from:\n\n * `Xgit.FilePath.check_path/2`\n * `Xgit.FilePath.check_path_segment/2`\n * `Xgit.Object.check/2`\n * `Xgit.Repository.Storage.put_loose_object/2`.\n \"\"\"\n @spec hash_object(content :: ContentSource.t(),\n type: ObjectType.t(),\n validate?: boolean,\n repo: Storage.t(),\n write?: boolean\n ) ::\n {:ok, object_id :: ObjectId.t()} | {:error, reason :: hash_object_reason}\n def hash_object(content, opts \\\\ []) when not is_nil(content) and is_list(opts) do\n %{type: type, validate?: validate?, repo: repo, write?: write?} =\n validate_hash_object_options(opts)\n\n %Object{content: content, type: type}\n |> apply_filters(repo)\n |> annotate_with_size()\n |> assign_object_id()\n |> validate_content(validate?)\n |> maybe_write_to_repo(repo, write?)\n |> hash_object_result(opts)\n end\n\n defp validate_hash_object_options(opts) do\n type = Keyword.get(opts, :type, :blob)\n\n unless ObjectType.valid?(type) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.hash_object/2: type #{inspect(type)} is invalid\"\n end\n\n validate? = Keyword.get(opts, :validate?, true)\n\n unless is_boolean(validate?) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.hash_object/2: validate? #{inspect(validate?)} is invalid\"\n end\n\n repo = Keyword.get(opts, :repo)\n\n unless repo == nil or Storage.valid?(repo) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.hash_object/2: repo #{inspect(repo)} is invalid\"\n end\n\n write? = Keyword.get(opts, :write?, false)\n\n unless is_boolean(write?) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.hash_object/2: write? #{inspect(write?)} is invalid\"\n end\n\n if write? and repo == nil do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.hash_object/2: write?: true requires a repo to be specified\"\n end\n\n %{type: type, validate?: validate?, repo: repo, write?: write?}\n end\n\n defp apply_filters(object, _repository) do\n # TO DO: Implement filters as described in attributes (for instance,\n # end-of-line conversion). I expect this to happen by replacing the\n # ContentSource implementation with another implementation that would\n # perform the content remapping. For now, always a no-op.\n\n # https://github.com/elixir-git/xgit/issues/18\n\n object\n end\n\n defp annotate_with_size(%Object{content: content} = object),\n do: %{object | size: ContentSource.length(content)}\n\n defp validate_content(%Object{type: :blob} = object, _validate?), do: {:ok, object}\n defp validate_content(object, false = _validate?), do: {:ok, object}\n\n defp validate_content(%Object{content: content} = object, _validate?) when is_list(content) do\n case Object.check(object) do\n :ok -> cover {:ok, object}\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp validate_content(%Object{content: content} = object, _validate?) do\n validate_content(\n %{object | content: content |> ContentSource.stream() |> Enum.to_list() |> Enum.concat()},\n true\n )\n end\n\n defp assign_object_id(%Object{content: content, type: type} = object),\n do: %{object | id: ObjectId.calculate_id(content, type)}\n\n defp maybe_write_to_repo({:ok, object}, _repo, false = _write?), do: cover({:ok, object})\n\n defp maybe_write_to_repo({:ok, object}, repo, true = _write?) 
do\n case Storage.put_loose_object(repo, object) do\n :ok -> cover {:ok, object}\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp maybe_write_to_repo({:error, reason}, _repo, _write?), do: cover({:error, reason})\n\n defp hash_object_result({:ok, %Object{id: id}}, _opts), do: cover({:ok, id})\n defp hash_object_result({:error, reason}, _opts), do: cover({:error, reason})\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `cat_file/2`.\n \"\"\"\n @type cat_file_reason :: :invalid_object_id | Storage.get_object_reason()\n\n @doc ~S\"\"\"\n Retrieves the content, type, and size information for a single object in a\n repository's object store.\n\n Analogous to the first form of [`git cat-file`](https://git-scm.com/docs/git-cat-file).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `object_id` is a string identifying the object.\n\n ## Return Value\n\n `{:ok, object}` if the object could be found. `object` is an instance of\n `Xgit.Object` and can be used to retrieve content and other information\n about the underlying git object.\n\n `{:error, :invalid_object_id}` if `object_id` can't be parsed as a valid git object ID.\n\n `{:error, :not_found}` if the object does not exist in the database.\n\n `{:error, :invalid_object}` if object was found, but invalid.\n \"\"\"\n @spec cat_file(repository :: Storage.t(), object_id :: ObjectId.t()) ::\n {:ok, Object} | {:error, reason :: cat_file_reason}\n def cat_file(repository, object_id) when is_pid(repository) and is_binary(object_id) do\n repository = Storage.assert_valid(repository)\n\n if ObjectId.valid?(object_id) do\n Storage.get_object(repository, object_id)\n else\n cover {:error, :invalid_object_id}\n end\n end\n\n ## --- Tree Objects ---\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `cat_file_tree/2`.\n \"\"\"\n @type cat_file_tree_reason ::\n :invalid_object_id\n | Storage.get_object_reason()\n | Tree.from_object_reason()\n\n @doc ~S\"\"\"\n Retrieves a `tree` object from a repository's object store and renders\n it as an `Xgit.Tree` struct.\n\n Analogous to\n [`git cat-file -p`](https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt--p)\n when the target object is a `tree` object.\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `object_id` is a string identifying the object.\n\n ## Return Value\n\n `{:ok, tree}` if the object could be found and understood as a tree.\n `tree` is an instance of `Xgit.Tree` and can be used to retrieve\n references to the members of that tree.\n\n `{:error, :invalid_object_id}` if `object_id` can't be parsed as a valid git object ID.\n\n `{:error, reason}` if otherwise unable. 
The relevant reason codes may come from:\n\n * `Xgit.Repository.Storage.get_object/2`\n * `Xgit.Tree.from_object/1`.\n \"\"\"\n @spec cat_file_tree(repository :: Storage.t(), object_id :: ObjectId.t()) ::\n {:ok, tree :: Tree.t()} | {:error, reason :: cat_file_tree_reason}\n def cat_file_tree(repository, object_id) when is_pid(repository) and is_binary(object_id) do\n repository = Storage.assert_valid(repository)\n\n with {:object_id_valid?, true} <- {:object_id_valid?, ObjectId.valid?(object_id)},\n {:ok, object} <- Storage.get_object(repository, object_id) do\n Tree.from_object(object)\n else\n {:error, reason} -> cover {:error, reason}\n {:object_id_valid?, false} -> cover {:error, :invalid_object_id}\n end\n end\n\n ## --- Commit Objects ---\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `cat_file_commit/2`.\n \"\"\"\n @type cat_file_commit_reason ::\n :invalid_object_id\n | Commit.from_object_reason()\n | Storage.get_object_reason()\n\n @doc ~S\"\"\"\n Retrieves a `commit` object from a repository's object store and renders\n it as an `Xgit.Commit` struct.\n\n Analogous to\n [`git cat-file -p`](https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt--p)\n when the target object is a `commit` object.\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `object_id` is a string identifying the object.\n\n ## Return Value\n\n `{:ok, commit}` if the object could be found and understood as a commit.\n `commit` is an instance of `Xgit.Commit` and can be used to retrieve\n references to the members of that commit.\n\n `{:error, :invalid_object_id}` if `object_id` can't be parsed as a valid git object ID.\n\n `{:error, reason}` if otherwise unable. The relevant reason codes may come from:\n\n * `Xgit.Commit.from_object/1`.\n * `Xgit.Repository.Storage.get_object/2`\n \"\"\"\n @spec cat_file_commit(repository :: Storage.t(), object_id :: ObjectId.t()) ::\n {:ok, commit :: Commit.t()} | {:error, reason :: cat_file_commit_reason}\n def cat_file_commit(repository, object_id) when is_pid(repository) and is_binary(object_id) do\n repository = Storage.assert_valid(repository)\n\n with {:object_id_valid?, true} <- {:object_id_valid?, ObjectId.valid?(object_id)},\n {:ok, object} <- Storage.get_object(repository, object_id) do\n Commit.from_object(object)\n else\n {:error, reason} -> cover {:error, reason}\n {:object_id_valid?, false} -> cover {:error, :invalid_object_id}\n end\n end\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `commit_tree/2`.\n \"\"\"\n @type commit_tree_reason ::\n :invalid_tree\n | :invalid_parents\n | :invalid_parent_ids\n | :invalid_message\n | :invalid_author\n | :invalid_committer\n | Storage.put_loose_object_reason()\n\n @doc ~S\"\"\"\n Creates a new commit object based on the provided tree object and parent commits.\n\n A commit object may have any number of parents. With exactly one parent, it is an\n ordinary commit. Having more than one parent makes the commit a merge between\n several lines of history. 
Initial (root) commits have no parents.\n\n Analogous to\n [`git commit-tree`](https://git-scm.com/docs/git-commit-tree).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n ## Options\n\n `tree`: (`Xgit.ObjectId`, required) ID of tree object\n\n `parents`: (list of `Xgit.ObjectId`) parent commit object IDs\n\n `message`: (byte list, required) commit message\n\n `author`: (`Xgit.PersonIdent`, required) author name, email, timestamp\n\n `committer`: (`Xgit.PersonIdent`) committer name, email timestamp\n (defaults to `author` if not specified)\n\n ## Return Value\n\n `{:ok, object_id}` with the object ID for the commit that was generated.\n\n `{:error, :invalid_tree}` if the `:tree` option refers to a tree that\n does not exist.\n\n `{:error, :invalid_parents}` if the `:parents` option is not a list.\n\n `{:error, :invalid_parent_ids}` if the `:parents` option contains any entries that\n do not reference valid commit objects.\n\n `{:error, :invalid_message}` if the `:message` option isn't a valid byte string.\n\n `{:error, :invalid_author}` if the `:author` option isn't a valid `PersonIdent` struct.\n\n `{:error, :invalid_committer}` if the `:committer` option isn't a valid `PersonIdent` struct.\n\n Reason codes may also come from `Xgit.Repository.Storage.put_loose_object/2`.\n \"\"\"\n @spec commit_tree(repository :: Storage.t(),\n tree: ObjectId.t(),\n parents: [ObjectId.t()],\n message: [byte],\n author: PersonIdent.t(),\n committer: PersonIdent.t()\n ) ::\n {:ok, object_id :: ObjectId.t()}\n | {:error, reason :: commit_tree_reason}\n def commit_tree(repository, opts \\\\ []) when is_pid(repository) do\n repository = Storage.assert_valid(repository)\n\n with {_tree, _parents, _message, _author, _committer} = verified_args <-\n validate_commit_tree_options(repository, opts),\n commit <- make_commit(verified_args),\n %{id: id} = object <- Commit.to_object(commit),\n :ok <- Storage.put_loose_object(repository, object) do\n cover {:ok, id}\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp validate_commit_tree_options(repository, opts) do\n with {:ok, tree_id} <- validate_tree(repository, Keyword.get(opts, :tree)),\n {:ok, parent_ids} <- validate_parents(repository, Keyword.get(opts, :parents)),\n {:ok, message} <- validate_message(Keyword.get(opts, :message)),\n {:ok, author} <- validate_person_ident(Keyword.get(opts, :author), :invalid_author),\n {:ok, committer} <-\n validate_person_ident(Keyword.get(opts, :committer, author), :invalid_committer) do\n cover {tree_id, parent_ids, message, author, committer}\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp validate_tree(repository, tree_id) do\n with true <- ObjectId.valid?(tree_id),\n {:ok, %Object{id: id} = object} <- Storage.get_object(repository, tree_id),\n {:ok, _tree} <- Tree.from_object(object) do\n cover {:ok, id}\n else\n _ -> cover {:error, :invalid_tree}\n end\n end\n\n defp validate_parents(_repository, nil), do: cover({:ok, []})\n\n defp validate_parents(repository, parent_ids) when is_list(parent_ids) do\n if Enum.all?(parent_ids, &commit_id_valid?(repository, &1)) do\n cover {:ok, parent_ids}\n else\n cover {:error, :invalid_parent_ids}\n end\n end\n\n defp validate_parents(_repository, _parents), do: cover({:error, :invalid_parents})\n\n defp commit_id_valid?(repository, parent_id) do\n with true <- ObjectId.valid?(parent_id),\n {:ok, %Object{type: :commit}} <- Storage.get_object(repository, parent_id) do\n cover true\n else\n _ 
-> cover false\n end\n end\n\n defp validate_message(message) when is_list(message) do\n if Enum.all?(message, &is_integer/1) do\n cover {:ok, message}\n else\n cover {:error, :invalid_message}\n end\n end\n\n defp validate_message(_message), do: cover({:error, :invalid_message})\n\n defp validate_person_ident(person_ident, invalid_reason) do\n if PersonIdent.valid?(person_ident) do\n cover {:ok, person_ident}\n else\n cover {:error, invalid_reason}\n end\n end\n\n defp make_commit({tree, parents, message, author, committer} = _verified_args) do\n %Commit{\n tree: tree,\n parents: parents,\n author: author,\n committer: committer,\n message: ensure_trailing_newline(message)\n }\n end\n\n defp ensure_trailing_newline(message) do\n if List.last(message) == 10 do\n message\n else\n message ++ '\\n'\n end\n end\n\n ## --- Tag Objects ---\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `cat_file_tag/2`.\n \"\"\"\n @type cat_file_tag_reason ::\n :invalid_object_id\n | Storage.get_object_reason()\n | Tag.from_object_reason()\n\n @doc ~S\"\"\"\n Retrieves a `tag` object from a repository's object store and renders\n it as an `Xgit.Tag` struct.\n\n Analogous to\n [`git cat-file -p`](https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt--p)\n when the target object is a `tag` object.\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `object_id` is a string identifying the object.\n\n ## Return Value\n\n `{:ok, tag}` if the object could be found and understood as a tag.\n `tag` is an instance of `Xgit.Tag` and can be used to retrieve\n references to the members of that tag.\n\n `{:error, :invalid_object_id}` if `object_id` can't be parsed as a valid git object ID.\n\n `{:error, reason}` if otherwise unable. The relevant reason codes may come from:\n\n * `Xgit.Repository.Storage.get_object/2`\n * `Xgit.Tag.from_object/1`.\n \"\"\"\n @spec cat_file_tag(repository :: Storage.t(), object_id :: ObjectId.t()) ::\n {:ok, tag :: Tag.t()} | {:error, reason :: cat_file_tag_reason}\n def cat_file_tag(repository, object_id) when is_pid(repository) and is_binary(object_id) do\n repository = Storage.assert_valid(repository)\n\n with {:object_id_valid?, true} <- {:object_id_valid?, ObjectId.valid?(object_id)},\n {:ok, object} <- Storage.get_object(repository, object_id) do\n Tag.from_object(object)\n else\n {:error, reason} -> cover {:error, reason}\n {:object_id_valid?, false} -> cover {:error, :invalid_object_id}\n end\n end\n\n ## --- Working Tree ---\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `ls_files_stage/1`.\n \"\"\"\n @type ls_files_stage_reason :: DirCache.from_iodevice_reason()\n\n @doc ~S\"\"\"\n Retrieves information about files in the working tree as described by the index file.\n\n Analogous to\n [`git ls-files --stage`](https://git-scm.com/docs/git-ls-files#Documentation/git-ls-files.txt---stage).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n ## Return Value\n\n `{:ok, entries}`. `entries` will be a list of `Xgit.DirCache.Entry` structs\n in sorted order.\n\n `{:error, :bare}` if `repository` doesn't have a working tree.\n\n `{:error, reason}` if the index file for `repository` isn't valid. 
(See\n `Xgit.DirCache.from_iodevice/1` for possible reason codes.)\n \"\"\"\n @spec ls_files_stage(repository :: Storage.t()) ::\n {:ok, entries :: [DirCacheEntry.t()]}\n | {:error, reason :: ls_files_stage_reason}\n def ls_files_stage(repository) when is_pid(repository) do\n with {:ok, working_tree} <- working_tree_from_opts(repository),\n {:ok, %DirCache{entries: entries} = _dir_cache} <-\n WorkingTree.dir_cache(working_tree) do\n cover {:ok, entries}\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n @typedoc ~S\"\"\"\n Cache info tuple `{mode, object_id, path}` to add to the index file.\n \"\"\"\n @type add_entry :: {mode :: FileMode.t(), object_id :: ObjectId.t(), path :: FilePath.t()}\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `update_index_cache_info/2`.\n \"\"\"\n @type update_index_cache_info_reason ::\n :invalid_entry\n | :bare\n | Xgit.Repository.WorkingTree.update_dir_cache_reason()\n\n @doc ~S\"\"\"\n Update the index file to reflect new contents.\n\n Analogous to the `--cacheinfo` form of\n [`git update-index`](https://git-scm.com/docs/git-update-index#Documentation/git-update-index.txt---cacheinfoltmodegtltobjectgtltpathgt).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to which the new entries should be written.\n\n `add`: a list of tuples of `{mode, object_id, path}` entries to add to the dir cache.\n In the event of collisions with existing entries, the existing entries will\n be replaced with the corresponding new entries.\n\n `remove`: a list of paths to remove from the dir cache. All versions of the file,\n regardless of stage, will be removed.\n\n ## Return Value\n\n `:ok` if successful.\n\n `{:error, :bare}` if `repository` doesn't have a working tree.\n\n `{:error, :invalid_entry}` if any tuple passed to `add` or `remove` was invalid.\n\n `{:error, :reason}` if unable. 
The relevant reason codes may come from\n `Xgit.Repository.WorkingTree.update_dir_cache/3`.\n \"\"\"\n @spec update_index_cache_info(\n repository :: Storage.t(),\n add :: [add_entry],\n remove :: [FilePath.t()]\n ) ::\n :ok | {:error, update_index_cache_info_reason()}\n def update_index_cache_info(repository, add, remove \\\\ [])\n when is_pid(repository) and is_list(add) and is_list(remove) do\n with {:ok, working_tree} <- working_tree_from_opts(repository),\n {:items_to_add, add} when is_list(add) <- {:items_to_add, parse_add_entries(add)},\n {:items_to_remove, remove} when is_list(remove) <-\n {:items_to_remove, parse_remove_entries(remove)} do\n WorkingTree.update_dir_cache(working_tree, add, remove)\n else\n {:items_to_add, _} -> cover {:error, :invalid_entry}\n {:items_to_remove, _} -> cover {:error, :invalid_entry}\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp parse_add_entries(add) do\n if Enum.all?(add, &valid_add?/1) do\n Enum.map(add, &map_add_entry/1)\n else\n cover :invalid\n end\n end\n\n defp valid_add?({mode, object_id, path})\n when is_file_mode(mode) and is_binary(object_id) and is_list(path),\n do: ObjectId.valid?(object_id) and FilePath.valid?(path)\n\n defp valid_add?(_), do: cover(false)\n\n defp map_add_entry({mode, object_id, path}) do\n %DirCacheEntry{\n name: path,\n stage: 0,\n object_id: object_id,\n mode: mode,\n size: 0,\n ctime: 0,\n ctime_ns: 0,\n mtime: 0,\n mtime_ns: 0,\n dev: 0,\n ino: 0,\n uid: 0,\n gid: 0,\n assume_valid?: false,\n extended?: false,\n skip_worktree?: false,\n intent_to_add?: false\n }\n end\n\n defp parse_remove_entries(remove) do\n if Enum.all?(remove, &valid_remove?/1) do\n Enum.map(remove, &map_remove_entry/1)\n else\n cover :invalid\n end\n end\n\n defp valid_remove?(name) when is_list(name), do: cover(true)\n defp valid_remove?(_), do: cover(false)\n\n defp map_remove_entry(name), do: cover({name, :all})\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `read_tree/3`.\n \"\"\"\n @type read_tree_reason :: :bare | WorkingTree.read_tree_reason()\n\n @doc ~S\"\"\"\n Read a `tree` object (and its descendants) and populate the index accordingly.\n\n Does not update files in the working tree itself.\n\n Analogous to [`git read-tree`](https://git-scm.com/docs/git-read-tree).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `object_id` is the object ID of the root working tree. The special name `:empty`\n may be used to empty the index.\n\n ## Options\n\n `:missing_ok?`: `true` to ignore any objects that are referenced by the tree\n structures that are not present in the object database. Normally this would be an error.\n\n ## Return Value\n\n `:ok` if successful.\n\n `{:error, :bare}` if `repository` doesn't have a working tree.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.DirCache.to_iodevice/2`\n * `Xgit.Repository.Storage.get_object/2`\n * `Xgit.Repository.Storage.WorkingTree.read_tree/3`\n * `Xgit.Tree.from_object/1`\n\n ## TO DO\n\n Implement `--prefix` option. https://github.com/elixir-git/xgit/issues/175\n \"\"\"\n @spec read_tree(repository :: Storage.t(), object_id :: ObjectId.t(), missing_ok?: boolean) ::\n :ok | {:error, reason :: read_tree_reason}\n def read_tree(repository, object_id, opts \\\\ [])\n when is_pid(repository) and (is_binary(object_id) or object_id == :empty) and is_list(opts) do\n with {:ok, working_tree} <- working_tree_from_opts(repository),\n _missing_ok? 
<- validate_read_tree_options(opts) do\n if object_id == :empty do\n WorkingTree.reset_dir_cache(working_tree)\n else\n WorkingTree.read_tree(working_tree, object_id, opts)\n end\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp validate_read_tree_options(opts) do\n missing_ok? = Keyword.get(opts, :missing_ok?, false)\n\n unless is_boolean(missing_ok?) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.read_tree/3: missing_ok? #{inspect(missing_ok?)} is invalid\"\n end\n\n missing_ok?\n end\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `write_tree/2`.\n \"\"\"\n @type write_tree_reason ::\n :bare\n | DirCache.to_tree_objects_reason()\n | DirCache.from_iodevice_reason()\n | Storage.put_loose_object_reason()\n | WorkingTree.write_tree_reason()\n\n @doc ~S\"\"\"\n Translates the current working tree, as reflected in its index file, to one or more\n tree objects.\n\n The working tree must be in a fully-merged state.\n\n Analogous to [`git write-tree`](https://git-scm.com/docs/git-write-tree).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n ## Options\n\n `:missing_ok?`: `true` to ignore any objects that are referenced by the index\n file that are not present in the object database. Normally this would be an error.\n\n `:prefix`: (`Xgit.FilePath`) if present, returns the `object_id` for the tree at\n the given subdirectory. If not present, writes a tree corresponding to the root.\n (The entire tree is written in either case.)\n\n ## Return Value\n\n `{:ok, object_id}` with the object ID for the tree that was generated. (If the exact tree\n specified by the index already existed, it will return that existing tree's ID.)\n\n `{:error, :bare}` if `repository` doesn't have a working tree.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.DirCache.to_tree_objects/2`\n * `Xgit.DirCache.from_iodevice/1`\n * `Xgit.Repository.Storage.put_loose_object/2`\n * `Xgit.Repository.Storage.WorkingTree.write_tree/2`\n \"\"\"\n @spec write_tree(repository :: Storage.t(), missing_ok?: boolean, prefix: FilePath.t()) ::\n {:ok, object_id :: ObjectId.t()}\n | {:error, reason :: write_tree_reason}\n def write_tree(repository, opts \\\\ []) when is_pid(repository) do\n with {:ok, working_tree} <- working_tree_from_opts(repository),\n _ <- validate_write_tree_options(opts) do\n cover WorkingTree.write_tree(working_tree, opts)\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp validate_write_tree_options(opts) do\n missing_ok? = Keyword.get(opts, :missing_ok?, false)\n\n unless is_boolean(missing_ok?) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.write_tree/2: missing_ok? #{inspect(missing_ok?)} is invalid\"\n end\n\n prefix = Keyword.get(opts, :prefix, [])\n\n unless prefix == [] or FilePath.valid?(prefix) do\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.write_tree/2: prefix #{inspect(prefix)} is invalid (should be a charlist, not a String)\"\n end\n\n {missing_ok?, prefix}\n end\n\n ## -- References --\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `update_ref/4`.\n \"\"\"\n @type update_ref_reason :: Storage.put_ref_reason() | :target_not_commit\n\n @doc ~S\"\"\"\n Update the object name stored in a ref.\n\n Analogous to [`git update-ref`](https://git-scm.com/docs/git-update-ref).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.\n\n `name` is the name of the reference to update. 
(See `t/Xgit.Ref.name`.)\n\n `new_value` is the object ID to be written at this reference. (Use `Xgit.ObjectId.zero/0` to delete the reference.)\n\n ## Options\n\n `old_target`: If present, a ref with this name must already exist and the `target`\n value must match the object ID provided in this option. (There is a special value `:new`\n which instead requires that the named ref must **not** exist.)\n\n ## TO DO\n\n Follow symbolic links, but only if they start with `refs/`.\n (https://github.com/elixir-git/xgit/issues/241)\n\n ## Return Value\n\n `:ok` if written successfully.\n\n `{:error, :target_not_commit}` if the target object is not of type `commit`.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.Repository.Storage.put_ref/3`\n * `Xgit.Repository.Storage.delete_ref/3`\n \"\"\"\n @spec update_ref(repository :: Storage.t(), name :: Ref.name(), new_value :: ObjectId.t(),\n old_target: ObjectId.t()\n ) :: :ok | {:error, reason :: update_ref_reason}\n def update_ref(repository, name, new_value, opts \\\\ [])\n when is_pid(repository) and is_binary(name) and is_binary(new_value) and is_list(opts) do\n repository = Storage.assert_valid(repository)\n repo_opts = validate_update_ref_opts(opts)\n\n if new_value == ObjectId.zero() do\n Storage.delete_ref(repository, name, repo_opts)\n else\n put_ref(repository, name, new_value, repo_opts)\n end\n end\n\n defp validate_update_ref_opts(opts) do\n case validate_old_target(Keyword.get(opts, :old_target, nil)) do\n nil -> cover []\n old_target -> cover [{:old_target, old_target}]\n end\n end\n\n defp validate_old_target(nil) do\n cover nil\n end\n\n defp validate_old_target(:new) do\n cover :new\n end\n\n defp validate_old_target(old_target) do\n if ObjectId.valid?(old_target) do\n cover old_target\n else\n raise ArgumentError,\n \"Xgit.Repository.Plumbing.update_ref/4: old_target #{inspect(old_target)} is invalid\"\n end\n end\n\n defp put_ref(repository, name, new_value, repo_opts) do\n with {:object, {:ok, %Object{type: type}}} <-\n {:object, Storage.get_object(repository, new_value)},\n {:type, :commit} <- {:type, type} do\n Storage.put_ref(repository, %Ref{name: name, target: new_value}, repo_opts)\n else\n {:object, {:error, :not_found}} -> cover {:error, :target_not_found}\n {:type, _} -> cover {:error, :target_not_commit}\n end\n end\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `get_symbolic_ref/2`.\n \"\"\"\n @type get_symbolic_ref_reason :: :not_symbolic_ref | Storage.get_ref_reason()\n\n @doc ~S\"\"\"\n Returns the target ref for an existing symbolic ref.\n\n Analogous to the one-argument form of\n [`git symbolic-ref`](https://git-scm.com/docs/git-symbolic-ref).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) in which to create the symbolic reference.\n\n `name` is the name of the symbolic reference to read. (See `t/Xgit.Ref.name`.)\n\n ## Return Value\n\n `{:ok, ref_name}` if read successfully. 
`ref_name` is the name of the targeted reference.\n\n `{:error, :not_symbolic_ref}` if `name` refers to a ref that is not a symbolic ref.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.Repository.Storage.get_ref/3`\n \"\"\"\n @spec get_symbolic_ref(\n repository :: Storage.t(),\n name :: Ref.name()\n ) :: {:ok, name :: Ref.name()} | {:error, reason :: get_symbolic_ref_reason}\n def get_symbolic_ref(repository, name) when is_pid(repository) and is_binary(name) do\n repository = Storage.assert_valid(repository)\n\n case Storage.get_ref(repository, name, follow_link?: false) do\n {:ok, %Ref{target: \"ref: \" <> target}} ->\n cover {:ok, target}\n\n {:error, :enotdir} ->\n cover {:error, :not_found}\n\n {:error, reason} ->\n cover {:error, reason}\n\n {:ok, _} ->\n cover {:error, :not_symbolic_ref}\n end\n end\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `put_symbolic_ref/4`.\n \"\"\"\n @type put_symbolic_ref_reason :: Storage.put_ref_reason()\n\n @doc ~S\"\"\"\n Creates or updates a symbolic ref to point at a specific branch.\n\n Analogous to the two-argument form of\n [`git symbolic-ref`](https://git-scm.com/docs/git-symbolic-ref).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) in which to create the symbolic reference.\n\n `name` is the name of the symbolic reference to create or update. (See `t/Xgit.Ref.name`.)\n\n `new_target` is the name of the reference that should be targeted by this symbolic reference.\n This reference need not exist.\n\n ## Options\n\n TO DO: Add option to specify ref log message.\n https://github.com/elixir-git/xgit/issues/251\n\n ## Return Value\n\n `:ok` if written successfully.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.Repository.Storage.put_ref/3`\n \"\"\"\n @spec put_symbolic_ref(\n repository :: Storage.t(),\n name :: Ref.name(),\n new_target :: Ref.name(),\n opts :: Keyword.t()\n ) :: :ok | {:error, reason :: put_symbolic_ref_reason}\n def put_symbolic_ref(repository, name, new_target, opts \\\\ [])\n when is_pid(repository) and is_binary(name) and is_binary(new_target) and is_list(opts) do\n repository = Storage.assert_valid(repository)\n\n Storage.put_ref(repository, %Ref{name: name, target: \"ref: #{new_target}\"},\n follow_link?: false\n )\n end\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `delete_symbolic_ref/2`.\n \"\"\"\n @type delete_symbolic_ref_reason :: Storage.delete_ref_reason()\n\n @doc ~S\"\"\"\n Deletes a symbolic ref.\n\n Analogous to [`git symbolic-ref --delete`](https://git-scm.com/docs/git-symbolic-ref#Documentation/git-symbolic-ref.txt---delete).\n\n ## Parameters\n\n `repository` is the `Xgit.Repository.Storage` (PID) in which to create the symbolic reference.\n\n `name` is the name of the symbolic reference to delete. 
(See `t/Xgit.Ref.name`.)\n\n ## Return Value\n\n `:ok` if deleted successfully.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.Repository.Storage.delete_ref/3`\n \"\"\"\n @spec delete_symbolic_ref(\n repository :: Storage.t(),\n name :: Ref.name()\n ) :: :ok | {:error, reason :: delete_symbolic_ref_reason}\n def delete_symbolic_ref(repository, name)\n when is_pid(repository) and is_binary(name) do\n repository\n |> Storage.assert_valid()\n |> Storage.delete_ref(name, follow_link?: false)\n end\n\n ## --- Options ---\n\n # Parse working tree and repository from arguments and options.\n\n defp working_tree_from_opts(repository, opts \\\\ []) when is_pid(repository) and is_list(opts) do\n repository = Storage.assert_valid(repository)\n\n case working_tree_from_repo_or_opts(repository, opts) do\n working_tree when is_pid(working_tree) -> cover {:ok, working_tree}\n nil -> cover {:error, :bare}\n end\n end\n\n defp working_tree_from_repo_or_opts(repository, _opts) do\n # TO DO: Allow working tree to be specified via options.\n # https://github.com/elixir-git/xgit/issues/133\n # (NOTE: Should follow through to ensure all relevant plumbing\n # modules have that option documented when implemented.)\n # For now, only recognize default working tree.\n\n Storage.default_working_tree(repository)\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"name":"lib/xgit/util/force_coverage.ex","source":"defmodule Xgit.Util.ForceCoverage do\n @moduledoc false\n\n # This module is intended for internal testing purposes only.\n # We use it to wrap literal returns from functions in a way that\n # makes them visible to code coverage tools.\n\n # When building dev or production releases, we use a more efficient\n # form; when building for test (i.e. coverage), we use a more\n # complicated form that defeats compiler inlining.\n\n # Inspired by discussion at\n # https://elixirforum.com/t/functions-returning-a-literal-are-not-seen-by-code-coverage/16812.\n\n # coveralls-ignore-start\n\n if Application.get_env(:xgit, :use_force_coverage?) 
do\n defmacro cover(false = x) do\n quote do\n inspect(unquote(x))\n unquote(x)\n end\n end\n\n defmacro cover(nil = x) do\n quote do\n inspect(unquote(x))\n unquote(x)\n end\n end\n\n defmacro cover(value) do\n quote do\n # credo:disable-for-next-line Credo.Check.Warning.BoolOperationOnSameValues\n false or unquote(value)\n end\n end\n else\n defmacro cover(value) do\n quote do\n unquote(value)\n end\n end\n end\n\n # coveralls-ignore-stop\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null],"name":"lib/xgit/util/comparison.ex","source":"defmodule Xgit.Util.Comparison do\n @moduledoc false\n\n # Internal common vocabulary for data types that can be compared and/or sorted.\n\n @typedoc \"\"\"\n Result of a comparison.\n \"\"\"\n @type result :: :lt | :eq | :gt\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,11,null,9,2,null,null,11,null,null,null,null,null,1,10,null,null,1,8,null,null],"name":"lib/xgit/file_content_source.ex","source":"defmodule Xgit.FileContentSource do\n @moduledoc ~S\"\"\"\n Implements `Xgit.ContentSource` to read content from a file on disk.\n \"\"\"\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n Describes a file on disk which will be used for reading content.\n \"\"\"\n @type t :: %__MODULE__{\n path: Path.t(),\n size: non_neg_integer | :file_not_found\n }\n\n @enforce_keys [:path, :size]\n defstruct [:path, :size]\n\n @doc ~S\"\"\"\n Creates an `Xgit.FileContentSource` for a file on disk.\n \"\"\"\n @spec new(path :: Path.t()) :: t\n def new(path) when is_binary(path) do\n size =\n case File.stat(path) do\n {:ok, %File.Stat{size: size}} -> cover size\n _ -> cover :file_not_found\n end\n\n %__MODULE__{path: path, size: size}\n end\n\n defimpl Xgit.ContentSource do\n alias Xgit.FileContentSource, as: FCS\n @impl true\n def length(%FCS{size: :file_not_found}), do: raise(\"file not found\")\n def length(%FCS{size: size}), do: cover(size)\n\n @impl true\n def stream(%FCS{size: :file_not_found}), do: raise(\"file not found\")\n def stream(%FCS{path: path}), do: File.stream!(path, [:charlist], 2048)\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,144,null,null,null,null,1,null,null,null,null,null,null,null,null,null,1,1,28,1,3,null,null,null,null,null,null,null,null],"name":"lib/xgit/object_type.ex","source":"defmodule Xgit.ObjectType do\n @moduledoc ~S\"\"\"\n Describes the known git object types.\n\n There are four distinct object types that can be stored in a git repository.\n Xgit communicates internally about these object types using the following\n atoms:\n\n * `:blob`\n * `:tree`\n * `:commit`\n * `:tag`\n\n This module is intended to be `use`d. 
Doing so will create an `alias` to the module\n so as to make `ObjectType.t` available for typespecs and will `import` the\n `is_object_type/1` guard.\n \"\"\"\n\n import Xgit.Util.ForceCoverage\n\n @object_types [:blob, :tree, :commit, :tag]\n\n @typedoc ~S\"\"\"\n One of the four known git object types, expressed as an atom.\n \"\"\"\n @type t :: :blob | :tree | :commit | :tag\n\n @doc ~S\"\"\"\n Return `true` if the value is one of the four known git object types.\n \"\"\"\n @spec valid?(t :: term) :: boolean\n def valid?(t), do: t in @object_types\n\n @doc ~S\"\"\"\n This guard requires the value to be one of the four known git object types.\n \"\"\"\n defguard is_object_type(t) when t in @object_types\n\n @doc ~S\"\"\"\n Parses a byte list and converts it to an object-type atom.\n\n Returns `:error` if the byte list doesn't match any of the known-valid object types.\n \"\"\"\n @spec from_bytelist(value :: [byte]) :: t | :error\n def from_bytelist(value)\n\n def from_bytelist('blob'), do: cover(:blob)\n def from_bytelist('tree'), do: cover(:tree)\n def from_bytelist('commit'), do: cover(:commit)\n def from_bytelist('tag'), do: cover(:tag)\n def from_bytelist(value) when is_list(value), do: cover(:error)\n\n defmacro __using__(opts) do\n quote location: :keep, bind_quoted: [opts: opts] do\n alias Xgit.ObjectType\n import Xgit.ObjectType, only: [is_object_type: 1]\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null],"name":"lib/xgit/repository/invalid_repository_error.ex","source":"defmodule Xgit.Repository.InvalidRepositoryError do\n @moduledoc ~S\"\"\"\n Raised when a call is made to any `Xgit.Repository.*` API, but the\n process ID doesn't implement the `Xgit.Repository.Storage` API.\n \"\"\"\n\n defexception message: \"not a valid Xgit repository\"\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,116,null,null,null,null,null,null,null,null,null,null,null,null,null,5,5,null,null,null,null,7,3,4,null,null,null,null,null,null,null,null,26,null,null,null,null,32,1,null,null,null,31,31,null,null,null,null,30,null,null,1,null,null,null,null,null,30,null,null,22,30,null,null,null,24,23,null,23,23,null,20,null,1,3,null,null,null,5,null,null,19,null,19,1,null,18,null,null,null,null,23,null,1,null,null,22,null,null,null,1,null,21,null,2,null,null,6,null,null,null,6,4,null,2,null,null,null,null,null,17,null,8,null,null,1,null,null,8,null,null,null,null,15,null,2,null,null,13,null,null,null,4,null,null,null,null,null,null,5,null,null,null,55,55,55,null,null,null,459,459,null,null,null,13,13,null,null,4,null,null,null,null,null,null,null,56,56,null,56,null,null,null,null,null,null,56,null,11,11,null,45,1,null,5,null,null,1,null,44,44,null,44,1,null,43,null,null,null,null,null,null,3,3,null,3,null,null],"name":"lib/xgit/repository/in_memory.ex","source":"defmodule Xgit.Repository.InMemory do\n @moduledoc ~S\"\"\"\n Implementation of `Xgit.Repository.Storage` that stores content in memory.\n\n _WARNING:_ This is intended for testing purposes only. As the name implies,\n repository content is stored only in memory. 
When the process that implements\n this repository terminates, the content it stores is lost.\n \"\"\"\n use Xgit.Repository.Storage\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.ConfigEntry\n alias Xgit.ContentSource\n alias Xgit.Object\n alias Xgit.Ref\n\n @config_entries [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"bare\", value: \"false\"},\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"true\"},\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"logallrefupdates\",\n value: \"true\"\n },\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"repositoryformatversion\",\n value: \"0\"\n }\n ]\n\n @doc ~S\"\"\"\n Start an in-memory git repository.\n\n Use the functions in `Xgit.Repository.Storage` to interact with this\n repository process.\n\n Any options are passed through to `GenServer.start_link/3`.\n\n ## Return Value\n\n See `GenServer.start_link/3`.\n \"\"\"\n @spec start_link(opts :: Keyword.t()) :: GenServer.on_start()\n def start_link(opts \\\\ []), do: Storage.start_link(__MODULE__, opts, opts)\n\n @impl true\n def init(opts) when is_list(opts) do\n cover(\n {:ok,\n %{\n config: @config_entries,\n loose_objects: %{},\n refs: %{\"HEAD\" => %Ref{name: \"HEAD\", target: \"ref: refs/heads/master\"}}\n }}\n )\n end\n\n ## --- Objects ---\n\n @impl true\n def handle_has_all_object_ids?(%{loose_objects: objects} = state, object_ids) do\n has_all_objects? = Enum.all?(object_ids, fn object_id -> Map.has_key?(objects, object_id) end)\n cover {:ok, has_all_objects?, state}\n end\n\n @impl true\n def handle_get_object(state, object_id) do\n case get_object_imp(state, object_id) do\n %Object{} = object -> cover {:ok, object, state}\n nil -> cover {:error, :not_found, state}\n end\n end\n\n defp get_object_imp(%{loose_objects: objects} = _state, object_id) do\n # Currently only checks for loose objects.\n # TO DO: Look for object in packs.\n # https://github.com/elixir-git/xgit/issues/52\n\n Map.get(objects, object_id)\n end\n\n @impl true\n def handle_put_loose_object(%{loose_objects: loose_objects} = state, %Object{id: id} = object) do\n if Map.has_key?(loose_objects, id) do\n {:error, :object_exists, state}\n else\n # Convert any pending content into a byte list.\n # We don't bother with zlib compression here.\n new_objects = Map.put(loose_objects, id, maybe_read_object_content(object))\n cover {:ok, %{state | loose_objects: new_objects}}\n end\n end\n\n defp maybe_read_object_content(%Object{content: content} = object) when is_list(content),\n do: object\n\n defp maybe_read_object_content(%Object{content: content} = object),\n do: %{object | content: content |> ContentSource.stream() |> Enum.concat()}\n\n ## --- References ---\n\n @impl true\n def handle_list_refs(%{refs: refs} = state) do\n cover {:ok, refs |> Map.values() |> Enum.filter(&heads_only/1) |> Enum.sort(), state}\n end\n\n defp heads_only(%Ref{name: \"refs/heads/\" <> _}), do: cover(true)\n defp heads_only(_), do: cover(false)\n\n @impl true\n def handle_put_ref(%{refs: refs} = state, %Ref{name: name, target: target} = ref, opts) do\n with :ok <- verify_target(state, target),\n {:deref, new_name} <-\n {:deref, deref_sym_link(state, name, Keyword.get(opts, :follow_link?, true))},\n ref <- %{ref | name: new_name},\n {:old_target_matches?, true} <-\n {:old_target_matches?, old_target_matches?(refs, name, Keyword.get(opts, :old_target))} do\n cover {:ok, %{state | refs: Map.put(refs, new_name, ref)}}\n else\n {:error, reason} -> cover {:error, reason, 
state}\n {:old_target_matches?, _} -> cover {:error, :old_target_not_matched, state}\n end\n end\n\n defp verify_target(_state, \"ref: \" <> _), do: cover(:ok)\n\n defp verify_target(state, target) do\n object = get_object_imp(state, target)\n\n if object == nil do\n cover {:error, :target_not_found}\n else\n cover :ok\n end\n end\n\n defp deref_sym_link(%{refs: refs} = state, ref_name, true = _follow_link?) do\n case Map.get(refs, ref_name) do\n %Ref{target: \"ref: \" <> link_target} when link_target != ref_name ->\n deref_sym_link(state, link_target, true)\n\n _ ->\n ref_name\n end\n end\n\n defp deref_sym_link(_state, ref_name, _follow_link?), do: cover(ref_name)\n\n defp old_target_matches?(_refs, _name, nil), do: cover(true)\n\n defp old_target_matches?(refs, name, :new), do: not Map.has_key?(refs, name)\n\n defp old_target_matches?(refs, name, old_target),\n do: match?(%Ref{target: ^old_target}, Map.get(refs, name))\n\n @impl true\n def handle_delete_ref(%{refs: refs} = state, name, opts) do\n if old_target_matches?(refs, name, Keyword.get(opts, :old_target)) do\n cover {:ok, %{state | refs: Map.delete(refs, name)}}\n else\n cover {:error, :old_target_not_matched, state}\n end\n end\n\n @impl true\n def handle_get_ref(state, name, opts) do\n case get_ref_imp(state, name, Keyword.get(opts, :follow_link?, true)) do\n %Ref{name: ^name} = ref ->\n cover {:ok, ref, state}\n\n %Ref{name: link_target} = ref ->\n cover {:ok, %{ref | link_target: link_target, name: name}, state}\n\n nil ->\n cover {:error, :not_found, state}\n end\n end\n\n defp get_ref_imp(%{refs: refs} = state, name, true = _follow_link?) do\n case Map.get(refs, name) do\n %Ref{target: \"ref: \" <> link_target} when link_target != name ->\n get_ref_imp(state, link_target, true)\n\n x ->\n cover x\n end\n end\n\n defp get_ref_imp(%{refs: refs} = _state, name, _follow_link), do: Map.get(refs, name)\n\n ## --- Config ---\n\n @impl true\n def handle_get_config_entries(%{config: config} = state, [] = _opts) do\n # Optimized case for \"all\" entries.\n cover {:ok, config, state}\n end\n\n def handle_get_config_entries(%{config: config} = state, opts) when is_list(opts) do\n opts = Enum.into(opts, %{})\n matching_entries = Enum.filter(config, &matches_opts?(&1, opts))\n cover {:ok, matching_entries, state}\n end\n\n defp matches_opts?(item, %{section: section, name: name} = opts) do\n subsection = Map.get(opts, :subsection)\n item.section == section && item.subsection == subsection && item.name == name\n end\n\n defp matches_opts?(item, %{section: section} = opts) do\n subsection = Map.get(opts, :subsection)\n item.section == section && item.subsection == subsection\n end\n\n defp matches_opts?(_item, _opts), do: cover(true)\n\n @impl true\n def handle_add_config_entry(\n %{config: config} = state,\n %ConfigEntry{section: section, subsection: subsection, name: name} = entry,\n opts\n ) do\n add? = Keyword.get(opts, :add?, false)\n replace_all? = Keyword.get(opts, :replace_all?, false)\n\n opts =\n opts\n |> Enum.into(%{})\n |> Map.put(:section, section)\n |> Map.put(:subsection, subsection)\n |> Map.put(:name, name)\n\n cond do\n add? ->\n new_config = config ++ [entry]\n cover {:ok, %{state | config: new_config}}\n\n replace_all? 
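The callbacks above back the public `Xgit.Repository.Storage` API. A minimal sketch of how that surface is typically exercised against the in-memory implementation, mirroring the shared ref tests further below (it assumes `Storage` functions accept the process returned by `InMemory.start_link/0`):

```elixir
alias Xgit.Ref
alias Xgit.Repository.InMemory
alias Xgit.Repository.Storage

{:ok, repo} = InMemory.start_link()

# A symbolic ref may point at a ref that doesn't exist yet,
# so no object needs to be written first.
:ok =
  Storage.put_ref(repo, %Ref{
    name: "refs/heads/mumble",
    target: "ref: refs/heads/other"
  })

# follow_link?: false returns the symbolic ref itself rather than its target.
{:ok, %Ref{name: "refs/heads/mumble", target: "ref: refs/heads/other"}} =
  Storage.get_ref(repo, "refs/heads/mumble", follow_link?: false)
```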
->\n new_config =\n config\n |> Enum.reject(&matches_opts?(&1, opts))\n |> Enum.concat([entry])\n\n cover {:ok, %{state | config: new_config}}\n\n true ->\n {replacing, remaining_config} = Enum.split_with(config, &matches_opts?(&1, opts))\n\n if Enum.count(replacing) > 1 do\n cover {:error, :replacing_multivar, state}\n else\n cover {:ok, %{state | config: remaining_config ++ [entry]}}\n end\n end\n end\n\n @impl true\n def handle_remove_config_entries(%{config: config} = state, opts) do\n opts = Map.new(opts)\n new_config = Enum.reject(config, &matches_opts?(&1, opts))\n\n cover {:ok, %{state | config: new_config}}\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,2784,null,2784,2782,2,null,null,null],"name":"lib/xgit/util/parse_charlist.ex","source":"defmodule Xgit.Util.ParseCharlist do\n @moduledoc false\n\n # Internal utility for parsing charlists with ambiguous encodings.\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Convert a list of bytes to an Elixir (UTF-8) string when the encoding is not\n definitively known. Try parsing as a UTF-8 byte array first, then try ISO-8859-1.\n \"\"\"\n @spec decode_ambiguous_charlist(b :: [byte]) :: String.t()\n def decode_ambiguous_charlist(b) when is_list(b) do\n raw = :erlang.list_to_binary(b)\n\n case :unicode.characters_to_binary(raw) do\n utf8 when is_binary(utf8) -> cover(utf8)\n _ -> :unicode.characters_to_binary(raw, :latin1)\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,174,3,null,null,null,null,null,null,null,null,null,null,null,null,null,218,null,null,null,null,null,null,null,null,299,228,null,null,4,null,222,1,5,null,2,26,null,null,514,514,null,null,null,515,420,null,null,null,2,null,null,null,null,null,514,null,null,1495,null,13,null,null,14,1482,1462,null,null,null,null,null,null,null,null,2,null,null,265,null,null,76,null,11,null],"name":"lib/xgit/ref.ex","source":"defmodule Xgit.Ref do\n @moduledoc ~S\"\"\"\n A reference is a struct that describes a mutable pointer to a commit or similar object.\n\n A reference is a key-value pair where the key is a name in a specific format\n (see [`git check-ref-format`](https://git-scm.com/docs/git-check-ref-format))\n and the value (`:target`) is either a SHA-1 hash or a reference to another reference key\n (i.e. `ref: (name-of-valid-ref)`).\n\n This structure contains the key-value pair and functions to validate both values.\n \"\"\"\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.ObjectId\n\n @typedoc ~S\"\"\"\n Name of a ref (typically `refs/heads/master` or similar).\n \"\"\"\n @type name :: String.t()\n\n @typedoc ~S\"\"\"\n Target for a ref. 
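`Xgit.Util.ParseCharlist` is an internal helper, but its fallback behaviour is easy to see in isolation. A small sketch with illustrative byte values:

```elixir
alias Xgit.Util.ParseCharlist

# Bytes that form valid UTF-8 decode as UTF-8 ...
"café" = ParseCharlist.decode_ambiguous_charlist([99, 97, 102, 195, 169])

# ... while bytes that are not valid UTF-8 fall back to ISO-8859-1 (latin1).
"café" = ParseCharlist.decode_ambiguous_charlist([99, 97, 102, 233])
```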
Can be either an `Xgit.ObjectId` or a string of the form\n `\"ref: refs/...\"`.\n \"\"\"\n @type target :: ObjectId.t() | String.t()\n\n @typedoc ~S\"\"\"\n This struct describes a single reference stored or about to be stored in a git\n repository.\n\n ## Struct Members\n\n * `:name`: the name of the reference (typically `refs/heads/master` or similar)\n * `:target`: the object ID currently marked by this reference or a symbolic link\n (`ref: refs/heads/master` or similar) to another reference\n * `:link_target`: the name of another reference which is targeted by this ref\n \"\"\"\n @type t :: %__MODULE__{\n name: name(),\n target: target(),\n link_target: name() | nil\n }\n\n @enforce_keys [:name, :target]\n defstruct [:name, :target, :link_target]\n\n @doc ~S\"\"\"\n Return `true` if the string describes a valid reference name.\n \"\"\"\n @spec valid_name?(name :: any) :: boolean\n def valid_name?(name) when is_binary(name), do: valid_name?(name, false, false)\n def valid_name?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Return `true` if the struct describes a valid reference.\n\n ## Options\n\n `allow_one_level?`: Set to `true` to disregard the rule requiring at least one `/`\n in name. (Similar to `--allow-onelevel` option.)\n\n `refspec?`: Set to `true` to allow a single `*` in the pattern. (Similar to\n `--refspec-pattern` option.)\n \"\"\"\n @spec valid?(ref :: any, allow_one_level?: boolean, refspec?: boolean) :: boolean\n def valid?(ref, opts \\\\ [])\n\n def valid?(%__MODULE__{name: name, target: target} = ref, opts)\n when is_binary(name) and is_binary(target)\n when is_list(opts) do\n valid_name?(\n name,\n Keyword.get(opts, :allow_one_level?, false),\n Keyword.get(opts, :refspec?, false)\n ) && valid_target?(target) &&\n valid_name_or_nil?(Map.get(ref, :link_target))\n end\n\n def valid?(_, _opts), do: cover(false)\n\n defp valid_name_or_nil?(nil), do: cover(true)\n defp valid_name_or_nil?(\"refs/\" <> _ = target_name), do: cover(valid_name?(target_name))\n defp valid_name_or_nil?(_), do: cover(false)\n\n defp valid_name?(\"@\", _, _), do: cover(false)\n defp valid_name?(\"HEAD\", _, _), do: cover(true)\n\n defp valid_name?(name, true, false) do\n all_components_valid?(name) && not Regex.match?(~r/[\\x00-\\x20\\\\\\?\\[:^\\x7E\\x7F]/, name) &&\n not String.ends_with?(name, \".\") && not String.contains?(name, \"@{\")\n end\n\n defp valid_name?(name, false, false) do\n String.contains?(name, \"/\") && valid_name?(name, true, false) &&\n not String.contains?(name, \"*\")\n end\n\n defp valid_name?(name, false, true) do\n String.contains?(name, \"/\") && valid_name?(name, true, false) && at_most_one_asterisk?(name)\n end\n\n defp all_components_valid?(name) do\n name\n |> String.split(\"/\")\n |> Enum.all?(&name_component_valid?/1)\n end\n\n defp name_component_valid?(component), do: not name_component_invalid?(component)\n\n defp name_component_invalid?(\"\"), do: cover(true)\n\n defp name_component_invalid?(component) do\n String.starts_with?(component, \".\") ||\n String.ends_with?(component, \".lock\") ||\n String.contains?(component, \"..\")\n end\n\n @asterisk_re ~r/\\*/\n\n defp at_most_one_asterisk?(name) do\n @asterisk_re\n |> Regex.scan(name)\n |> Enum.count()\n |> Kernel.<=(1)\n end\n\n defp valid_target?(target), do: ObjectId.valid?(target) || valid_ref_target?(target)\n\n defp valid_ref_target?(\"ref: \" <> target_name),\n do: valid_name?(target_name, false, false) && String.starts_with?(target_name, \"refs/\")\n\n defp valid_ref_target?(_), do: 
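A short sketch of the `Xgit.Ref` validation functions shown above, reusing the 40- and 39-character IDs that appear in the shared ref tests:

```elixir
alias Xgit.Ref

true = Ref.valid_name?("refs/heads/master")
# ".." is not allowed within a name component.
false = Ref.valid_name?("refs/heads/bad..name")

# A well-formed object ID target passes ...
true =
  Ref.valid?(%Ref{
    name: "refs/heads/master",
    target: "532ad3cb2518ad13a91e717998a26a6028df0623"
  })

# ... while a 39-character (truncated) ID does not.
false =
  Ref.valid?(%Ref{
    name: "refs/heads/master",
    target: "532ad3cb2518ad13a91e717998a26a6028df062"
  })
```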
cover(false)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"name":"lib/xgit/repository/test/ref_test.ex","source":"defmodule Xgit.Repository.Test.RefTest do\n @moduledoc false\n\n # Not normally part of the public API, but available for implementors of\n # `Xgit.Repository.Storage` behaviour modules. Tests the callbacks related to\n # `Xgit.Ref` to ensure correct implementation of the core contracts.\n # Other tests may be necessary to ensure interop. (For example, the on-disk\n # repository test code adds more tests to ensure correct interop with\n # command-line git.)\n\n # Users of this module must provide a `setup` callback that provides a\n # `repo` member. 
This repository may be of any type, but should be \"empty.\"\n # An empty repo has the same data structures as an on-disk repo created\n # via `git init` in a previously-empty directory.\n\n import Xgit.Util.SharedTestCase\n\n define_shared_tests do\n alias Xgit.Object\n alias Xgit.Ref\n alias Xgit.Repository.Plumbing\n alias Xgit.Repository.Storage\n\n @test_content 'test content\\n'\n @test_content_id \"d670460b4b4aece5915caf5c68d12f560a9fe3e4\"\n\n describe \"get_ref/2 (shared)\" do\n test \"default repo contains HEAD reference\", %{repo: repo} do\n assert {:ok, %Xgit.Ref{name: \"HEAD\", target: \"ref: refs/heads/master\"}} =\n Storage.get_ref(repo, \"HEAD\", follow_link?: false)\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"HEAD\", follow_link?: true)\n end\n\n test \"not_found case\", %{repo: repo} do\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/master\")\n end\n\n test \"invalid_name case\", %{repo: repo} do\n assert {:error, :invalid_name} = Storage.get_ref(repo, \"refs/../../heads/master\")\n end\n end\n\n describe \"list_refs/2 (shared)\" do\n test \"null case\", %{repo: repo} do\n assert {:ok, []} = Storage.list_refs(repo)\n end\n end\n\n describe \"put_ref/3 (shared)\" do\n test \"error: invalid reference\", %{repo: repo} do\n assert {:error, :invalid_ref} =\n Storage.put_ref(repo, %Ref{\n name: \"refs/heads/master\",\n target: \"532ad3cb2518ad13a91e717998a26a6028df062\"\n })\n end\n\n test \"error: object must exist\", %{repo: repo} do\n assert {:error, :target_not_found} =\n Storage.put_ref(repo, %Ref{\n name: \"refs/heads/master\",\n target: \"532ad3cb2518ad13a91e717998a26a6028df0623\"\n })\n end\n\n test \"target reference need not exist\", %{repo: repo} do\n assert :ok =\n Storage.put_ref(repo, %Ref{\n name: \"refs/heads/mumble\",\n target: \"ref: refs/heads/other\"\n })\n\n assert {:ok, %Xgit.Ref{name: \"refs/heads/mumble\", target: \"ref: refs/heads/other\"}} =\n Storage.get_ref(repo, \"refs/heads/mumble\", follow_link?: false)\n end\n\n test \"object exists, but is not a commit\", %{repo: repo} do\n object = %Object{type: :blob, content: @test_content, size: 13, id: @test_content_id}\n :ok = Storage.put_loose_object(repo, object)\n\n assert :ok =\n Storage.put_ref(repo, %Ref{\n name: \"refs/heads/master\",\n target: @test_content_id\n })\n end\n\n test \"happy path: results visible to list_refs/1 and get_ref/2\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n {:ok, commit_id_other} =\n Plumbing.hash_object('shhh... another fake commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n other_ref = %Ref{\n name: \"refs/heads/other\",\n target: commit_id_other\n }\n\n assert :ok = Storage.put_ref(repo, other_ref)\n\n assert {:ok, ^master_ref} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, ^other_ref} = Storage.get_ref(repo, \"refs/heads/other\")\n\n assert {:ok, [^master_ref, ^other_ref]} = Storage.list_refs(repo)\n end\n\n test \"follows HEAD reference correctly\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... 
not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n head_ref = %Ref{\n name: \"HEAD\",\n target: commit_id_master\n }\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n master_ref_via_head = %Ref{\n name: \"HEAD\",\n target: commit_id_master,\n link_target: \"refs/heads/master\"\n }\n\n assert :ok = Storage.put_ref(repo, head_ref)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n assert {:ok, ^master_ref} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, ^master_ref_via_head} = Storage.get_ref(repo, \"HEAD\")\n end\n\n test \"can replace an existing object ID ref with a symbolic ref\", %{repo: repo} do\n {:ok, commit_id} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n foo_ref = %Ref{\n name: \"refs/heads/foo\",\n target: commit_id\n }\n\n assert :ok = Storage.put_ref(repo, foo_ref)\n\n assert {:ok, [^foo_ref]} = Storage.list_refs(repo)\n\n foo_ref2 = %Ref{\n name: \"refs/heads/foo\",\n target: \"ref: refs/heads/master\"\n }\n\n assert :ok = Storage.put_ref(repo, foo_ref2)\n\n assert {:ok, ^foo_ref2} = Storage.get_ref(repo, \"refs/heads/foo\", follow_link?: false)\n assert {:ok, [^foo_ref2]} = Storage.list_refs(repo)\n end\n\n test \"can retarget a symbolic ref by using follow_link?: false\", %{repo: repo} do\n foo_ref = %Ref{\n name: \"refs/heads/foo\",\n target: \"ref: refs/heads/master\"\n }\n\n assert :ok = Storage.put_ref(repo, foo_ref)\n\n assert {:ok, [^foo_ref]} = Storage.list_refs(repo)\n\n foo_ref2 = %Ref{\n name: \"refs/heads/foo\",\n target: \"ref: refs/heads/other\"\n }\n\n assert :ok = Storage.put_ref(repo, foo_ref2, follow_link?: false)\n\n assert {:ok, ^foo_ref2} = Storage.get_ref(repo, \"refs/heads/foo\", follow_link?: false)\n assert {:ok, [^foo_ref2]} = Storage.list_refs(repo)\n end\n\n test \":old_target option (correct match)\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n {:ok, commit_id2_master} =\n Plumbing.hash_object('shhh... another not commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref2 = %Ref{\n name: \"refs/heads/master\",\n target: commit_id2_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref2, old_target: commit_id_master)\n assert {:ok, [^master_ref2]} = Storage.list_refs(repo)\n end\n\n test \":old_target (incorrect match)\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n {:ok, commit_id2_master} =\n Plumbing.hash_object('shhh... 
another not commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref2 = %Ref{\n name: \"refs/heads/master\",\n target: commit_id2_master\n }\n\n assert {:error, :old_target_not_matched} =\n Storage.put_ref(repo, master_ref2,\n old_target: \"2075df9dff2b5a10ad417586b4edde66af849bad\"\n )\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n end\n\n test \"put_ref: :old_target (does not exist)\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n {:ok, commit_id2_master} =\n Plumbing.hash_object('shhh... another not commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref2 = %Ref{\n name: \"refs/heads/master2\",\n target: commit_id2_master\n }\n\n assert {:error, :old_target_not_matched} =\n Storage.put_ref(repo, master_ref2, old_target: commit_id_master)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n end\n\n test \":old_target = :new\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref, old_target: :new)\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n end\n\n test \":old_target = :new, but target does exist\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n {:ok, commit_id2_master} =\n Plumbing.hash_object('shhh... another not commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref2 = %Ref{\n name: \"refs/heads/master\",\n target: commit_id2_master\n }\n\n assert {:error, :old_target_not_matched} =\n Storage.put_ref(repo, master_ref2, old_target: :new)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n end\n end\n\n describe \"delete_ref/3 (shared)\" do\n test \"removes an existing ref\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n assert :ok = Storage.delete_ref(repo, \"refs/heads/master\")\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, []} = Storage.list_refs(repo)\n end\n\n test \"removes symbolic ref but not underlying ref\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... 
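The `:old_target` option gives `put_ref/3` compare-and-swap semantics. A hedged sketch, assuming `repo` is an empty repository process started earlier (for example via `Xgit.Repository.InMemory.start_link/0`):

```elixir
alias Xgit.Ref
alias Xgit.Repository.Plumbing
alias Xgit.Repository.Storage

# Write two fake "commit" objects so their IDs exist in the repository.
{:ok, old_id} =
  Plumbing.hash_object('first fake commit',
    repo: repo, type: :commit, validate?: false, write?: true)

{:ok, new_id} =
  Plumbing.hash_object('second fake commit',
    repo: repo, type: :commit, validate?: false, write?: true)

# old_target: :new succeeds only when the ref does not exist yet.
:ok = Storage.put_ref(repo, %Ref{name: "refs/heads/master", target: old_id}, old_target: :new)

# Compare-and-swap: only move the ref if it still points at old_id.
:ok = Storage.put_ref(repo, %Ref{name: "refs/heads/master", target: new_id}, old_target: old_id)

# A stale old_target leaves the ref untouched.
{:error, :old_target_not_matched} =
  Storage.put_ref(repo, %Ref{name: "refs/heads/master", target: old_id}, old_target: old_id)
```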
not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n\n other_ref = %Ref{\n name: \"refs/heads/other\",\n target: \"ref: refs/heads/master\"\n }\n\n assert :ok = Storage.put_ref(repo, other_ref)\n\n assert :ok = Storage.delete_ref(repo, \"refs/heads/other\", follow_link?: false)\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/other\")\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n end\n\n test \"quietly 'succeeds' if ref didn't exist\", %{repo: repo} do\n assert {:ok, []} = Storage.list_refs(repo)\n\n assert :ok = Storage.delete_ref(repo, \"refs/heads/master\")\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, []} = Storage.list_refs(repo)\n end\n\n test \"error if name invalid\", %{repo: repo} do\n assert {:ok, []} = Storage.list_refs(repo)\n\n assert {:error, :invalid_ref} = Storage.delete_ref(repo, \"refs\")\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, []} = Storage.list_refs(repo)\n end\n\n test \":old_target matches existing ref\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n assert :ok = Storage.delete_ref(repo, \"refs/heads/master\", old_target: commit_id_master)\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, []} = Storage.list_refs(repo)\n end\n\n test \"doesn't remove ref if :old_target doesn't match\", %{repo: repo} do\n {:ok, commit_id_master} =\n Plumbing.hash_object('shhh... 
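`delete_ref/3` follows the same contract. A sketch under the same assumption about `repo`:

```elixir
alias Xgit.Ref
alias Xgit.Repository.Plumbing
alias Xgit.Repository.Storage

{:ok, commit_id} =
  Plumbing.hash_object('shhh... not really a commit',
    repo: repo, type: :commit, validate?: false, write?: true)

:ok = Storage.put_ref(repo, %Ref{name: "refs/heads/master", target: commit_id})

# Deleting with a stale :old_target leaves the ref in place ...
{:error, :old_target_not_matched} =
  Storage.delete_ref(repo, "refs/heads/master",
    old_target: "bec43c416143e6b8bf9a3b559260185757e1386b"
  )

# ... while a matching :old_target removes it.
:ok = Storage.delete_ref(repo, "refs/heads/master", old_target: commit_id)

# Deleting a ref that no longer exists still quietly returns :ok.
:ok = Storage.delete_ref(repo, "refs/heads/master")
```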
not really a commit',\n repo: repo,\n type: :commit,\n validate?: false,\n write?: true\n )\n\n master_ref = %Ref{\n name: \"refs/heads/master\",\n target: commit_id_master\n }\n\n assert :ok = Storage.put_ref(repo, master_ref)\n\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n\n assert {:error, :old_target_not_matched} =\n Storage.delete_ref(repo, \"refs/heads/master\",\n old_target: \"bec43c416143e6b8bf9a3b559260185757e1386b\"\n )\n\n assert {:ok, ^master_ref} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, [^master_ref]} = Storage.list_refs(repo)\n end\n\n test \"error if :old_target specified and no ref exists\", %{repo: repo} do\n assert {:ok, []} = Storage.list_refs(repo)\n\n assert {:error, :old_target_not_matched} =\n Storage.delete_ref(repo, \"refs/heads/master\",\n old_target: \"bec43c416143e6b8bf9a3b559260185757e1386b\"\n )\n\n assert {:error, :not_found} = Storage.get_ref(repo, \"refs/heads/master\")\n assert {:ok, []} = Storage.list_refs(repo)\n end\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,14,15,10,null,null,2,null,null,null,null,null,57,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,45,null,null,2,null,null,45,39,33,29,27,21,15,null,null,13,null,null,null,null,null,null,null,null,32,null,null,null,null,21,18,null,12,null,null,3,null,null,6,null,null,null,3,10,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,8,1,null,null,7,7,7,7,7,null,null,null,null,null,null,7,null,null,null,null,null,null,null],"name":"lib/xgit/tag.ex","source":"defmodule Xgit.Tag do\n @moduledoc ~S\"\"\"\n Represents a git `tag` object in memory.\n \"\"\"\n alias Xgit.ContentSource\n alias Xgit.Object\n alias Xgit.ObjectId\n alias Xgit.ObjectType\n alias Xgit.PersonIdent\n alias Xgit.Ref\n\n use Xgit.ObjectType\n\n import Xgit.Util.ForceCoverage\n import Xgit.Util.ParseHeader, only: [next_header: 1]\n\n @typedoc ~S\"\"\"\n This struct describes a single `tag` object so it can be manipulated in memory.\n\n ## Struct Members\n\n * `:object`: (`Xgit.ObjectId`) object referenced by this tag\n * `:type`: (`Xgit.ObjectType`) type of the target object\n * `:name`: (bytelist) name of the tag\n * `:tagger`: (`Xgit.PersonIdent`) person who created the tag\n * `:message`: (bytelist) user-entered tag message (encoding unspecified)\n\n **TO DO:** Support signatures and other extensions.\n https://github.com/elixir-git/xgit/issues/202\n \"\"\"\n @type t :: %__MODULE__{\n object: ObjectId.t(),\n type: ObjectType.t(),\n name: [byte],\n tagger: PersonIdent.t() | nil,\n message: [byte]\n }\n\n @enforce_keys [:object, :type, :name, :message]\n defstruct [:object, :type, :name, :message, tagger: nil]\n\n @doc ~S\"\"\"\n Return `true` if the value is a tag struct that is valid.\n \"\"\"\n @spec valid?(tag :: any) :: boolean\n def valid?(tag)\n\n def valid?(%__MODULE__{\n object: object_id,\n type: object_type,\n name: name,\n tagger: tagger,\n message: message\n })\n when is_binary(object_id) and is_object_type(object_type) and is_list(name) and\n is_list(message) do\n ObjectId.valid?(object_id) &&\n not Enum.empty?(name) &&\n (tagger == nil || PersonIdent.valid?(tagger)) &&\n not Enum.empty?(message)\n end\n\n def 
valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Return `true` if the value provided is valid as a tag name.\n \"\"\"\n @spec valid_name?(name :: any) :: boolean\n def valid_name?(name) when is_list(name), do: Ref.valid_name?(\"refs/tags/#{name}\")\n def valid_name?(_name), do: cover(false)\n\n @typedoc ~S\"\"\"\n Error response codes returned by `from_object/1`.\n \"\"\"\n @type from_object_reason :: :not_a_tag | :invalid_tag\n\n @doc ~S\"\"\"\n Renders a tag structure from an `Xgit.Object`.\n\n ## Return Values\n\n `{:ok, tag}` if the object contains a valid `tag` object.\n\n `{:error, :not_a_tag}` if the object contains an object of a different type.\n\n `{:error, :invalid_tag}` if the object says that is of type `tag`, but\n can not be parsed as such.\n \"\"\"\n @spec from_object(object :: Object.t()) :: {:ok, tag :: t} | {:error, from_object_reason}\n def from_object(object)\n\n def from_object(%Object{type: :tag, content: content} = _object) do\n content\n |> ContentSource.stream()\n |> Enum.to_list()\n |> from_object_internal()\n end\n\n def from_object(%Object{} = _object), do: cover({:error, :not_a_tag})\n\n defp from_object_internal(data) do\n with {:object, {'object', object_id_str, data}} <- {:object, next_header(data)},\n {:object_id, {object_id, []}} <- {:object_id, ObjectId.from_hex_charlist(object_id_str)},\n {:type_str, {'type', type_str, data}} <- {:type_str, next_header(data)},\n {:type, type} when is_object_type(type) <- {:type, ObjectType.from_bytelist(type_str)},\n {:name, {'tag', [_ | _] = name, data}} <- {:name, next_header(data)},\n {:tagger_id, tagger, data} <- optional_tagger(data),\n message when is_list(message) <- drop_if_lf(data) do\n # TO DO: Support signatures and other extensions.\n # https://github.com/elixir-git/xgit/issues/202\n cover {:ok,\n %__MODULE__{\n object: object_id,\n type: type,\n name: name,\n tagger: tagger,\n message: message\n }}\n else\n _ -> cover {:error, :invalid_tag}\n end\n end\n\n defp optional_tagger(data) do\n with {:tagger, {'tagger', tagger_str, data}} <- {:tagger, next_header(data)},\n {:tagger_id, %PersonIdent{} = tagger} <-\n {:tagger_id, PersonIdent.from_byte_list(tagger_str)} do\n cover {:tagger_id, tagger, data}\n else\n {:tagger, :no_header_found} ->\n cover {:tagger_id, nil, data}\n\n {:tagger_id, x} ->\n cover {:tagger_error, x}\n end\n end\n\n defp drop_if_lf([10 | data]), do: cover(data)\n defp drop_if_lf([]), do: cover([])\n defp drop_if_lf(_), do: cover(:error)\n\n @doc ~S\"\"\"\n Renders this tag structure into a corresponding `Xgit.Object`.\n\n If the tag structure is not valid, will raise `ArgumentError`.\n \"\"\"\n @spec to_object(commit :: t) :: Object.t()\n def to_object(commit)\n\n def to_object(\n %__MODULE__{\n object: object_id,\n type: object_type,\n name: tag_name,\n tagger: %PersonIdent{} = tagger,\n message: message\n } = tag\n ) do\n unless valid?(tag) do\n raise ArgumentError, \"Xgit.Tag.to_object/1: tag is not valid\"\n end\n\n rendered_tag =\n 'object #{object_id}\\n' ++\n 'type #{object_type}\\n' ++\n 'tag #{tag_name}\\n' ++\n 'tagger #{PersonIdent.to_external_string(tagger)}\\n' ++\n '\\n' ++\n message\n\n # TO DO: Support signatures and other extensions.\n # https://github.com/elixir-git/xgit/issues/202\n\n cover %Object{\n type: :tag,\n content: rendered_tag,\n size: Enum.count(rendered_tag),\n id: ObjectId.calculate_id(rendered_tag, :tag)\n }\n 
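A small sketch of building a tag in memory and rendering it with `Xgit.Tag.to_object/1`; the object ID below is hypothetical and only needs to be well-formed for `valid?/1`:

```elixir
alias Xgit.{PersonIdent, Tag}

tagger = %PersonIdent{
  name: "A U Thor",
  email: "author@example.com",
  when: 1_580_000_000_000,
  tz_offset: -480
}

tag = %Tag{
  # Hypothetical commit ID; valid?/1 only checks that it is well-formed.
  object: "2cbe984de9cca6c2fb6f894484fd3f986a3adad0",
  type: :commit,
  name: 'v1.0.0',
  tagger: tagger,
  message: 'release 1.0.0\n'
}

true = Tag.valid_name?('v1.0.0')
true = Tag.valid?(tag)

# Renders the canonical "object/type/tag/tagger" form as a git tag object.
%Xgit.Object{type: :tag} = Tag.to_object(tag)
```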
end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,146,136,128,128,128,128,null,null,null,null,null,null,18,null,null,null,136,null,null,null,1096,null,null,128,null,null,null,20,null,null,null,null,null,null,128,128,null,128,128,null,128,null,null,null,null,null,16,null,null,null,null,256,null,256,null,1183,null,null,256,null,null,null,112,null,112,112,null,112,null,null,null,null,null,null,null,null,null,null,null,null,67,null,null,null,null,null,null,null,37,null,2,null,35,null,null,37,null,2,null,35,null,null,37,37,null,37,null,36,null,1,null,null,37,null,8,null,29,null,null,37,null,null,null,null,null,null,null,null,null,null,null,63,null,15,null,null,null,null,null,null,null,null,null,33,null,null,null,null,null,4,null,null],"name":"lib/xgit/person_ident.ex","source":"# Copyright (C) 2007, Dave Watson \n# Copyright (C) 2007, Robin Rosenberg \n# Copyright (C) 2006-2008, Shawn O. Pearce \n# and other copyright owners as documented in the project's IP log.\n#\n# Elixir adaptation from jgit file:\n# org.eclipse.jgit/src/org/eclipse/jgit/lib/PersonIdent.java\n#\n# Copyright (C) 2019, Eric Scouten \n#\n# This program and the accompanying materials are made available\n# under the terms of the Eclipse Distribution License v1.0 which\n# accompanies this distribution, is reproduced below, and is\n# available at http://www.eclipse.org/org/documents/edl-v10.php\n#\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or\n# without modification, are permitted provided that the following\n# conditions are met:\n#\n# - Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#\n# - Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n#\n# - Neither the name of the Eclipse Foundation, Inc. nor the\n# names of its contributors may be used to endorse or promote\n# products derived from this software without specific prior\n# written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND\n# CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,\n# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\ndefmodule Xgit.PersonIdent do\n @moduledoc ~S\"\"\"\n A combination of a person identity and time in git.\n \"\"\"\n\n alias Xgit.Util.ParseCharlist\n alias Xgit.Util.ParseDecimal\n\n import Xgit.Util.ForceCoverage\n\n @typedoc \"Time zone offset in minutes +/- from GMT.\"\n @type tz_offset :: -720..840\n\n @typedoc ~S\"\"\"\n The tuple of name, email, time, and time zone that specifies who wrote or\n committed something.\n\n ## Struct Members\n\n * `:name`: (string) human-readable name of the author or committer\n * `:email`: (string) e-mail address for this person\n * `:when`: (integer) time in the Unix epoch in milliseconds\n * `:tz_offset`: (integer) time zone offset from GMT in minutes\n \"\"\"\n @type t :: %__MODULE__{\n name: String.t(),\n email: String.t(),\n when: integer,\n tz_offset: tz_offset()\n }\n\n @enforce_keys [:name, :email, :when, :tz_offset]\n defstruct [:name, :email, :when, :tz_offset]\n\n @doc ~S\"\"\"\n Parse a name line (e.g. author, committer, tagger) into a `PersonIdent` struct.\n\n ## Parameters\n\n `b` should be a charlist of an \"author\" or \"committer\" line pointing to the\n character after the header name and space.\n\n ## Return Value\n\n Returns a `PersonIdent` struct or `nil` if the charlist did not point to a\n properly-formatted identity.\n \"\"\"\n @spec from_byte_list(b :: [byte]) :: t() | nil\n def from_byte_list(b) when is_list(b) do\n with {_, [?< | email_start]} <- Enum.split_while(b, &(&1 != ?<)),\n true <- has_closing_angle_bracket?(email_start),\n {email, _} <- Enum.split_while(email_start, &(&1 != ?>)),\n name <- parse_name(b),\n {time, tz} <- parse_tz(email_start) do\n %__MODULE__{\n name: ParseCharlist.decode_ambiguous_charlist(name),\n email: ParseCharlist.decode_ambiguous_charlist(email),\n when: time,\n tz_offset: tz\n }\n else\n _ -> cover nil\n end\n end\n\n defp has_closing_angle_bracket?(b), do: Enum.any?(b, &(&1 == ?>))\n\n defp parse_name(b) do\n b\n |> Enum.take_while(&(&1 != ?<))\n |> Enum.reverse()\n |> drop_first_if_space()\n |> Enum.reverse()\n end\n\n defp drop_first_if_space([?\\s | b]), do: cover(b)\n defp drop_first_if_space(b), do: cover(b)\n\n defp parse_tz(first_email_start) do\n # Start searching from end of line, as after first name-email pair,\n # another name-email pair may occur. 
We will ignore all kinds of\n # \"junk\" following the first email.\n\n [?> | first_email_end] = Enum.drop_while(first_email_start, &(&1 != ?>))\n rev = Enum.reverse(first_email_end)\n\n {tz, rev} = trim_word_and_rev(rev)\n {time, _rev} = trim_word_and_rev(rev)\n\n case {time, tz} do\n {[_ | _], [_ | _]} ->\n {time |> ParseDecimal.from_decimal_charlist() |> elem(0),\n tz |> parse_timezone_offset() |> elem(0)}\n\n _ ->\n cover {0, 0}\n end\n end\n\n defp trim_word_and_rev(rev) do\n rev = Enum.drop_while(rev, &(&1 == ?\\s))\n\n word =\n rev\n |> Enum.take_while(&(&1 != ?\\s))\n |> Enum.reverse()\n\n cover {word, Enum.drop(rev, Enum.count(word))}\n end\n\n defp parse_timezone_offset(b) do\n {v, b} = ParseDecimal.from_decimal_charlist(b)\n\n tz_min = rem(v, 100)\n tz_hour = div(v, 100)\n\n cover {tz_hour * 60 + tz_min, b}\n end\n\n @doc ~S\"\"\"\n Sanitize the given string for use in an identity and append to output.\n\n Trims whitespace from both ends and special characters `\\n < >` that\n interfere with parsing; appends all other characters to the output.\n \"\"\"\n @spec sanitized(s :: String.t()) :: String.t()\n def sanitized(s) when is_binary(s) do\n s\n |> String.trim()\n |> String.replace(~r/[<>\\x00-\\x0C\\x0E-\\x1F]/, \"\")\n end\n\n @doc ~S\"\"\"\n Formats a timezone offset.\n \"\"\"\n @spec format_timezone(offset :: tz_offset()) :: String.t()\n def format_timezone(offset) when is_integer(offset) do\n sign =\n if offset < 0 do\n cover \"-\"\n else\n cover \"+\"\n end\n\n offset =\n if offset < 0 do\n cover -offset\n else\n offset\n end\n\n offset_hours = div(offset, 60)\n offset_mins = rem(offset, 60)\n\n hours_prefix =\n if offset_hours < 10 do\n cover \"0\"\n else\n cover \"\"\n end\n\n mins_prefix =\n if offset_mins < 10 do\n cover \"0\"\n else\n cover \"\"\n end\n\n cover \"#{sign}#{hours_prefix}#{offset_hours}#{mins_prefix}#{offset_mins}\"\n end\n\n @doc ~S\"\"\"\n Returns `true` if the struct is a valid `PersonIdent`.\n \"\"\"\n @spec valid?(person_ident :: any) :: boolean\n def valid?(person_ident)\n\n def valid?(%__MODULE__{name: name, email: email, when: whxn, tz_offset: tz_offset})\n when is_binary(name) and is_binary(email) and is_integer(whxn) and is_integer(tz_offset) and\n tz_offset >= -720 and tz_offset <= 840,\n do: cover(true)\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Formats the person identity for git storage.\n \"\"\"\n @spec to_external_string(person_ident :: t) :: String.t()\n def to_external_string(person_ident)\n\n def to_external_string(%__MODULE__{name: name, email: email, when: whxn, tz_offset: tz_offset})\n when is_binary(name) and is_binary(email) and is_integer(whxn) and is_integer(tz_offset) do\n cover \"#{sanitized(name)} <#{sanitized(email)}> #{div(whxn, 1000)} #{\n format_timezone(tz_offset)\n }\"\n end\n\n defimpl String.Chars do\n defdelegate to_string(person_ident), to: Xgit.PersonIdent, as: :to_external_string\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,350,null,null,350,350,350,null,null,null,493,null,null,null,null,null,null,557,null,555,null,64,null,null,491,null,null,null,41,514,null,null,350,348,348,null,null],"name":"lib/xgit/util/unzip_stream.ex","source":"defmodule Xgit.Util.UnzipStream do\n @moduledoc false\n\n # Internal utility that transforms a stream from a compressed\n # ZIP stream to uncompressed data.\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Transforms a stream from a compressed ZIP stream to uncompressed data.\n \"\"\"\n @spec unzip(compressed_stream :: 
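A short sketch of `Xgit.PersonIdent` round-tripping between the struct and git's on-disk identity format (the values are illustrative):

```elixir
alias Xgit.PersonIdent

pi = %PersonIdent{
  name: "A U Thor",
  email: "author@example.com",
  when: 1_580_000_000_000,   # milliseconds since the Unix epoch
  tz_offset: -480            # minutes from GMT
}

true = PersonIdent.valid?(pi)

PersonIdent.to_external_string(pi)
# => "A U Thor <author@example.com> 1580000000 -0800"

# Parsing a rendered identity line back into a struct:
%PersonIdent{name: "A U Thor", email: "author@example.com", tz_offset: -480} =
  PersonIdent.from_byte_list('A U Thor <author@example.com> 1580000000 -0800')
```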
Enum.t()) :: Enum.t()\n def unzip(compressed_stream),\n do: Stream.transform(compressed_stream, &start/0, &process_data/2, &finish/1)\n\n defp start do\n z = :zlib.open()\n :ok = :zlib.inflateInit(z)\n z\n end\n\n defp process_data(compressed_data, z) do\n cover {compressed_data\n |> process_all_data(z, [])\n |> Enum.reverse()\n |> Enum.concat(), z}\n end\n\n defp process_all_data(compressed_data, z, uncompressed_data_acc) do\n {status, iodata} = :zlib.safeInflate(z, compressed_data)\n\n case status do\n :continue ->\n process_all_data([], z, [to_byte_list(iodata) | uncompressed_data_acc])\n\n :finished ->\n cover [to_byte_list(iodata) | uncompressed_data_acc]\n end\n end\n\n defp to_byte_list([]), do: cover([])\n defp to_byte_list([b]) when is_binary(b), do: :binary.bin_to_list(b)\n\n defp finish(z) do\n :ok = :zlib.inflateEnd(z)\n :ok = :zlib.close(z)\n cover nil\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,701,null,834,8,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,162,null,4,3,null,null,1791,null,1791,1513,278,null,null,null,844,null,null,4,null,null,4,null,661,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,263,null,4,null,null,2358,2358,null,2358,2355,2349,2329,2020,1976,1912,1898,1890,1844,null,514,null,null,null,null,2358,3,null,2355,null,null,null,null,2355,6,null,2349,null,null,null,null,2349,97,20,null,2329,null,null,null,1937,null,null,392,83,309,null,null,null,7,7,7,7,7,7,null,7,253,525,null,4,4,4,null,null,173,32,null,141,null,null,null,1835,null,null,173,null,null,82,null,91,null,null,null,null,null,null,163,4,6,null,161,3,3,null,75,7,82,null,10,4,7,4,3,4,50,null,1871,null,null,105,64,null,41,null,null,null,1871,null,null,41,null,41,14,null,27,null,null,null,1815,null,null,83,null,83,8,null,75,null,null,null,1815,null,null,75,null,null,278,null,75,46,null,29,null,null,null,4,4,18,15,4,4,26,null,30,3,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,5,null,3,null,null,30,14,null,null,null,21,15,4,2,null,null,null,3,2,1,3,1,1,1,2,1,null,null,1,13,null,null,41,null,12,1,null,5,null,null,110,null,null,null,7,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,2,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,null,null,4,null,2,null,null,4,null,2,null,null,null,9,null,null,392,369,null,23,null,null,null,66,null,null,null,null,null,6,null,230,1089,null,null,null,null,null,null,null,null,null,105,806,268,null,null,null,null,null,null,null,35,null,null,null,268,null,null,null,null,null,null,1,null,null,274,null,null,66,33,null,241,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null
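`Xgit.Util.UnzipStream` is internal, but its behaviour can be sketched with data compressed on the fly via `:zlib.compress/1` (assuming the zlib-header format, as used for git loose objects, is what the stream expects):

```elixir
alias Xgit.Util.UnzipStream

# Compress some bytes, then stream the single compressed chunk back through unzip/1.
compressed = :zlib.compress("hello, xgit")

'hello, xgit' =
  [compressed]
  |> UnzipStream.unzip()
  |> Enum.to_list()
```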
,null,25,7,18,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,91,null,null,null,null,101,null,null,21,null,null,64,null,null,17,null,14,null,7,30,70,null,null,91,27,null,64,null,null,null,null,7,2,null,5,null,null,null],"name":"lib/xgit/file_path.ex","source":"# Copyright (C) 2008-2010, Google Inc.\n# Copyright (C) 2008, Shawn O. Pearce \n# Copyright (C) 2016, 2018 Google Inc.\n# and other copyright owners as documented in the project's IP log.\n#\n# Elixir adaptation from jgit files:\n# org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java\n# org.eclipse.jgit/src/org/eclipse/jgit/util/Paths.java\n#\n# Copyright (C) 2019, Eric Scouten \n#\n# This program and the accompanying materials are made available\n# under the terms of the Eclipse Distribution License v1.0 which\n# accompanies this distribution, is reproduced below, and is\n# available at http://www.eclipse.org/org/documents/edl-v10.php\n#\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or\n# without modification, are permitted provided that the following\n# conditions are met:\n#\n# - Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#\n# - Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n#\n# - Neither the name of the Eclipse Foundation, Inc. nor the\n# names of its contributors may be used to endorse or promote\n# products derived from this software without specific prior\n# written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND\n# CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,\n# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,\n# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\ndefmodule Xgit.FilePath do\n @moduledoc ~S\"\"\"\n Describes a file path as stored in a git repo.\n\n Paths are always stored as a list of bytes. 
The git specification\n does not explicitly specify an encoding, but most commonly the\n path is interpreted as UTF-8.\n\n We use byte lists here to avoid confusion and possible misintepretation\n in Elixir's `String` type for non-UTF-8 paths.\n\n Paths are alternately referred to in git as \"file name,\" \"path,\"\n \"path name,\" and \"object name.\" We're using the name `FilePath`\n to avoid collision with Elixir's built-in `Path` module and to make\n it clear that we're talking about the path to where a file is stored\n on disk.\n \"\"\"\n\n use Bitwise\n use Xgit.FileMode\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.Util.Comparison\n\n @typedoc \"\"\"\n Representation of a file's path within a git repo.\n\n Typically, though not necessarily, interpreted as UTF-8.\n \"\"\"\n @type t :: [byte]\n\n @doc ~S\"\"\"\n Return `true` if the value is a valid file path.\n\n This performs the same checks as `check_path/2`, but folds away all of the potential\n error values to `false`.\n\n ## Parameters\n\n `path` is a UTF-8 byte list containing the path to be tested.\n\n ## Options\n\n * `windows?`: `true` to additionally verify that the path is permissible on Windows file systems\n * `macosx?`: `true` to additionally verify that the path is permissible on Mac OS X file systems\n \"\"\"\n @spec valid?(path :: any, windows?: boolean, macosx?: boolean) :: boolean\n def valid?(path, opts \\\\ [])\n\n def valid?(path, opts) when is_list(path) and is_list(opts), do: check_path(path, opts) == :ok\n def valid?(_path, _opts), do: cover(false)\n\n @typedoc ~S\"\"\"\n Error codes which can be returned by `check_path/2`.\n \"\"\"\n @type check_path_reason ::\n :invalid_name | :empty_path | :absolute_path | :duplicate_slash | :trailing_slash\n\n @typedoc ~S\"\"\"\n Error codes which can be returned by `check_path_segment/2`.\n \"\"\"\n @type check_path_segment_reason ::\n :invalid_name\n | :empty_name\n | :reserved_name\n | :invalid_utf8_sequence\n | :invalid_name_on_windows\n | :windows_device_name\n\n @doc ~S\"\"\"\n Check the provided path to see if it is a valid path within a git repository.\n\n The rules enforced here are slightly different from what is allowed in a `tree`\n object in that we allow `/` characters to build hierarchical paths.\n\n ## Parameters\n\n `path` is a UTF-8 byte list containing the path to be tested.\n\n ## Options\n\n * `windows?`: `true` to additionally verify that the path is permissible on Windows file systems\n * `macosx?`: `true` to additionally verify that the path is permissible on Mac OS X file systems\n\n ## Return Values\n\n * `:ok` if the name is permissible given the constraints chosen above\n * `{:error, :invalid_name}` if the name is not permissible\n * `{:error, :empty_path}` if the name is empty\n * `{:error, :absolute_path}` if the name starts with a `/`\n * `{:error, :duplicate_slash}` if the name contains two `/` characters in a row\n * `{:error, :trailing_slash}` if the name contains a trailing `/`\n\n See also: error return values from `check_path_segment/2`.\n \"\"\"\n @spec check_path(path :: t, windows?: boolean, macosx?: boolean) ::\n :ok | {:error, check_path_reason} | {:error, check_path_segment_reason}\n def check_path(path, opts \\\\ [])\n\n def check_path([], opts) when is_list(opts), do: cover({:error, :empty_path})\n def check_path([?/ | _], opts) when is_list(opts), do: cover({:error, :absolute_path})\n\n def check_path(path, opts) when is_list(path) and is_list(opts) do\n {first_segment, remaining_path} = Enum.split_while(path, &(&1 != 
?/))\n\n case check_path_segment(first_segment, opts) do\n :ok -> check_remaining_path(remaining_path, opts)\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp check_remaining_path([], _opts), do: cover(:ok)\n\n defp check_remaining_path([?/], _opts),\n do: cover({:error, :trailing_slash})\n\n defp check_remaining_path([?/, ?/ | _remainder], _opts),\n do: cover({:error, :duplicate_slash})\n\n defp check_remaining_path([?/ | remainder], opts), do: check_path(remainder, opts)\n\n @doc ~S\"\"\"\n Check the provided path segment to see if it is a valid path within a git `tree`\n object.\n\n ## Parameters\n\n `path` is a UTF-8 byte list containing the path segment to be tested.\n\n ## Options\n\n * `windows?`: `true` to additionally verify that the path is permissible on Windows file systems\n * `macosx?`: `true` to additionally verify that the path is permissible on Mac OS X file systems\n\n ## Return Values\n\n * `:ok` if the name is permissible given the constraints chosen above\n * `{:error, :invalid_name}` if the name is not permissible\n * `{:error, :empty_name}` if the name is empty\n * `{:error, :reserved_name}` if the name is reserved for git's use (i.e. `.git`)\n * `{:error, :invalid_utf8_sequence}` if the name contains certain incomplete UTF-8 byte sequences\n (only when `macosx?: true` is selected)\n * `{:error, :invalid_name_on_windows}` if the name contains characters that are\n not allowed on Windows file systems (only when `windows?: true` is selected)\n * `{:error, :windows_device_name}` if the name matches a Windows device name (`aux`, etc.)\n (only when `windows?: true` is selected)\n \"\"\"\n @spec check_path_segment(path :: t, windows?: boolean, macosx?: boolean) ::\n :ok | {:error, check_path_segment_reason}\n def check_path_segment(path, opts \\\\ [])\n\n def check_path_segment([], opts) when is_list(opts), do: cover({:error, :empty_name})\n\n def check_path_segment(path_segment, opts) when is_list(path_segment) and is_list(opts) do\n windows? = Keyword.get(opts, :windows?, false)\n macosx? = Keyword.get(opts, :macosx?, false)\n\n with :ok <- refute_has_nil_bytes(path_segment),\n :ok <- refute_has_slash(path_segment),\n :ok <- check_windows_git_name(path_segment),\n :ok <- check_windows_characters(path_segment, windows?),\n :ok <- check_git_special_name(path_segment),\n :ok <- check_git_path_with_mac_ignorables(path_segment, macosx?),\n :ok <- check_truncated_utf8_for_mac(path_segment, macosx?),\n :ok <- check_illegal_windows_name_ending(path_segment, windows?),\n :ok <- check_windows_device_name(path_segment, windows?) do\n cover :ok\n else\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp refute_has_nil_bytes(path_segment) do\n if Enum.any?(path_segment, &(&1 == 0)) do\n cover {:error, :invalid_name}\n else\n cover :ok\n end\n end\n\n defp refute_has_slash(path_segment) do\n if Enum.any?(path_segment, &(&1 == ?/)) do\n cover {:error, :invalid_name}\n else\n cover :ok\n end\n end\n\n defp check_windows_git_name(path_segment) do\n with 5 <- Enum.count(path_segment),\n 'git~1' <- Enum.map(path_segment, &to_lower/1) do\n cover {:error, :invalid_name}\n else\n _ -> cover :ok\n end\n end\n\n defp check_windows_characters(_path_segment, false = _windows?), do: cover(:ok)\n\n defp check_windows_characters(path_segment, true = _windows?) 
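A sketch of the path checks documented above, using charlist paths as `Xgit.FilePath` expects:

```elixir
alias Xgit.FilePath

:ok = FilePath.check_path('lib/xgit/file_path.ex')
{:error, :absolute_path} = FilePath.check_path('/etc/passwd')
{:error, :duplicate_slash} = FilePath.check_path('a//b')
{:error, :trailing_slash} = FilePath.check_path('lib/')

{:error, :reserved_name} = FilePath.check_path_segment('.git')
{:error, :windows_device_name} = FilePath.check_path_segment('aux', windows?: true)

# valid?/2 folds all of the error cases above down to a boolean.
true = FilePath.valid?('README.md')
```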
do\n case Enum.find(path_segment, &invalid_on_windows?/1) do\n nil -> cover :ok\n _ -> cover {:error, :invalid_name_on_windows}\n end\n end\n\n defp invalid_on_windows?(?\"), do: cover(true)\n defp invalid_on_windows?(?*), do: cover(true)\n defp invalid_on_windows?(?:), do: cover(true)\n defp invalid_on_windows?(?<), do: cover(true)\n defp invalid_on_windows?(?>), do: cover(true)\n defp invalid_on_windows?(??), do: cover(true)\n defp invalid_on_windows?(?\\\\), do: cover(true)\n defp invalid_on_windows?(?|), do: cover(true)\n defp invalid_on_windows?(c) when c >= 1 and c <= 31, do: cover(true)\n defp invalid_on_windows?(_), do: cover(false)\n\n defp check_git_special_name('.'), do: cover({:error, :reserved_name})\n defp check_git_special_name('..'), do: cover({:error, :reserved_name})\n defp check_git_special_name('.git'), do: cover({:error, :reserved_name})\n\n defp check_git_special_name([?. | rem] = _name) do\n if normalized_git?(rem) do\n cover {:error, :reserved_name}\n else\n cover :ok\n end\n end\n\n defp check_git_special_name(_), do: cover(:ok)\n\n defp normalized_git?(name) do\n if git_name_prefix?(name) do\n name\n |> Enum.drop(3)\n |> valid_git_suffix?()\n else\n cover false\n end\n end\n\n # The simpler approach would be to convert this to a string and use\n # String.downcase/1 on it. But that would create a lot of garbage to collect.\n # This approach is a bit more cumbersome, but more efficient.\n defp git_name_prefix?([?g | it]), do: it_name_prefix?(it)\n defp git_name_prefix?([?G | it]), do: it_name_prefix?(it)\n defp git_name_prefix?(_), do: cover(false)\n\n defp it_name_prefix?([?i | it]), do: t_name_prefix?(it)\n defp it_name_prefix?([?I | it]), do: t_name_prefix?(it)\n defp it_name_prefix?(_), do: cover(false)\n\n defp t_name_prefix?([?t | _]), do: cover(true)\n defp t_name_prefix?([?T | _]), do: cover(true)\n defp t_name_prefix?(_), do: cover(false)\n\n defp valid_git_suffix?([]), do: cover(true)\n defp valid_git_suffix?(' '), do: cover(true)\n defp valid_git_suffix?('.'), do: cover(true)\n defp valid_git_suffix?('. '), do: cover(true)\n defp valid_git_suffix?(' .'), do: cover(true)\n defp valid_git_suffix?(' . '), do: cover(true)\n defp valid_git_suffix?(_), do: cover(false)\n\n defp check_git_path_with_mac_ignorables(_path_segment, false = _macosx?), do: cover(:ok)\n\n defp check_git_path_with_mac_ignorables(path_segment, true = _macosx?) do\n if match_mac_hfs_path?(path_segment, '.git') do\n cover {:error, :reserved_name}\n else\n cover :ok\n end\n end\n\n defp check_truncated_utf8_for_mac(_path_segment, false = _macosx?), do: cover(:ok)\n\n defp check_truncated_utf8_for_mac(path_segment, true = _macosx?) do\n tail3 = Enum.slice(path_segment, -2, 2)\n\n if Enum.any?(tail3, &(&1 == 0xE2 or &1 == 0xEF)) do\n cover {:error, :invalid_utf8_sequence}\n else\n cover :ok\n end\n end\n\n defp check_illegal_windows_name_ending(_path_segment, false = _windows?), do: cover(:ok)\n\n defp check_illegal_windows_name_ending(path_segment, true = _windows?) do\n last_char = List.last(path_segment)\n\n if last_char == ?\\s || last_char == ?. do\n cover {:error, :invalid_name_on_windows}\n else\n cover :ok\n end\n end\n\n defp check_windows_device_name(_path_segment, false = _windows?), do: cover(:ok)\n\n defp check_windows_device_name(path_segment, true = _windows?) 
do\n lc_name =\n path_segment\n |> Enum.map(&to_lower/1)\n |> Enum.take_while(&(&1 != ?.))\n\n if windows_device_name?(lc_name) do\n cover {:error, :windows_device_name}\n else\n cover :ok\n end\n end\n\n defp windows_device_name?('aux'), do: cover(true)\n defp windows_device_name?('con'), do: cover(true)\n defp windows_device_name?('com' ++ [d]), do: positive_digit?(d)\n defp windows_device_name?('lpt' ++ [d]), do: positive_digit?(d)\n defp windows_device_name?('nul'), do: cover(true)\n defp windows_device_name?('prn'), do: cover(true)\n defp windows_device_name?(_), do: cover(false)\n\n defp positive_digit?(b) when b >= ?1 and b <= ?9, do: cover(true)\n defp positive_digit?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Return `true` if the filename _could_ be read as a `.gitmodules` file when\n checked out to the working directory.\n\n This would seem like a simple comparison, but some filesystems have peculiar\n rules for normalizing filenames:\n\n NTFS has backward-compatibility support for 8.3 synonyms of long file\n names. (See\n https://web.archive.org/web/20160318181041/https://usn.pw/blog/gen/2015/06/09/filenames/\n for details.) NTFS is also case-insensitive.\n\n MacOS's HFS+ folds away ignorable Unicode characters in addition to case\n folding.\n\n ## Parameters\n\n `path` is a UTF-8 byte list containing the path to be tested.\n\n ## Options\n\n By default, this function will only check for the plain `.gitmodules` name.\n\n * `windows?`: `true` to additionally check for any path that might be treated\n as a `.gitmodules` file on Windows file systems\n * `macosx?`: `true` to additionally check for any path that might be treated\n as a `.gitmodules` file on Mac OS X file systems\n \"\"\"\n @spec gitmodules?(path :: t, windows?: boolean, macosx?: boolean) :: boolean\n def gitmodules?(path, opts \\\\ [])\n\n def gitmodules?('.gitmodules', opts) when is_list(opts), do: cover(true)\n\n def gitmodules?(path, opts) when is_list(opts) do\n (Keyword.get(opts, :windows?, false) and ntfs_gitmodules?(path)) or\n (Keyword.get(opts, :macosx?, false) and mac_hfs_gitmodules?(path))\n end\n\n defp ntfs_gitmodules?(name) do\n case Enum.count(name) do\n 8 -> ntfs_shortened_gitmodules?(Enum.map(name, &to_lower(&1)))\n 11 -> Enum.map(name, &to_lower(&1)) == '.gitmodules'\n _ -> cover false\n end\n end\n\n defp ntfs_shortened_gitmodules?('gitmod~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('gi7eba~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('gi7eb~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('gi7e~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('gi7~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('gi~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('g~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?('~' ++ rem), do: ntfs_numeric_suffix?(rem)\n defp ntfs_shortened_gitmodules?(_), do: cover(false)\n\n # The first digit of the numeric suffix must not be zero.\n defp ntfs_numeric_suffix?([?0 | _rem]), do: cover(false)\n defp ntfs_numeric_suffix?(rem), do: ntfs_numeric_suffix_zero_ok?(rem)\n\n defp ntfs_numeric_suffix_zero_ok?([c | rem]) when c >= ?0 and c <= ?9,\n do: ntfs_numeric_suffix_zero_ok?(rem)\n\n defp ntfs_numeric_suffix_zero_ok?([]), do: cover(true)\n defp ntfs_numeric_suffix_zero_ok?(_), do: cover(false)\n\n defp mac_hfs_gitmodules?(path), do: match_mac_hfs_path?(path, '.gitmodules')\n\n # 
http://www.utf8-chartable.de/unicode-utf8-table.pl?start=8192\n defp match_mac_hfs_path?(data, match, ignorable? \\\\ false)\n\n # U+200C 0xe2808c ZERO WIDTH NON-JOINER\n defp match_mac_hfs_path?([0xE2, 0x80, 0x8C | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+200D 0xe2808d ZERO WIDTH JOINER\n defp match_mac_hfs_path?([0xE2, 0x80, 0x8D | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+200E 0xe2808e LEFT-TO-RIGHT MARK\n defp match_mac_hfs_path?([0xE2, 0x80, 0x8E | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+200F 0xe2808f RIGHT-TO-LEFT MARK\n defp match_mac_hfs_path?([0xE2, 0x80, 0x8F | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+202A 0xe280aa LEFT-TO-RIGHT EMBEDDING\n defp match_mac_hfs_path?([0xE2, 0x80, 0xAA | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+202B 0xe280ab RIGHT-TO-LEFT EMBEDDING\n defp match_mac_hfs_path?([0xE2, 0x80, 0xAB | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+202C 0xe280ac POP DIRECTIONAL FORMATTING\n defp match_mac_hfs_path?([0xE2, 0x80, 0xAC | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+202D 0xe280ad LEFT-TO-RIGHT OVERRIDE\n defp match_mac_hfs_path?([0xE2, 0x80, 0xAD | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+202E 0xe280ae RIGHT-TO-LEFT OVERRIDE\n defp match_mac_hfs_path?([0xE2, 0x80, 0xAE | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n defp match_mac_hfs_path?([0xE2, 0x80, _ | _], _match, _ignorable?), do: cover(false)\n\n # U+206A 0xe281aa INHIBIT SYMMETRIC SWAPPING\n defp match_mac_hfs_path?([0xE2, 0x81, 0xAA | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+206B 0xe281ab ACTIVATE SYMMETRIC SWAPPING\n defp match_mac_hfs_path?([0xE2, 0x81, 0xAB | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+206C 0xe281ac INHIBIT ARABIC FORM SHAPING\n defp match_mac_hfs_path?([0xE2, 0x81, 0xAC | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+206D 0xe281ad ACTIVATE ARABIC FORM SHAPING\n defp match_mac_hfs_path?([0xE2, 0x81, 0xAD | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+206E 0xe281ae NATIONAL DIGIT SHAPES\n defp match_mac_hfs_path?([0xE2, 0x81, 0xAE | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n # U+206F 0xe281af NOMINAL DIGIT SHAPES\n defp match_mac_hfs_path?([0xE2, 0x81, 0xAF | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n defp match_mac_hfs_path?([0xE2, 0x81, _ | _], _match, _ignorable?), do: cover(false)\n\n defp match_mac_hfs_path?([0xEF, 0xBB, 0xBF | data], match, _ignorable?),\n do: match_mac_hfs_path?(data, match, true)\n\n defp match_mac_hfs_path?([0xEF, _, _ | _], _match, _ignorable?), do: cover(false)\n\n defp match_mac_hfs_path?([c | _] = _list, _match, _ignorable?)\n when c == 0xE2 or c == 0xEF,\n do: cover(false)\n\n defp match_mac_hfs_path?([c | data], [m | match], ignorable?) 
do\n if to_lower(c) == m do\n match_mac_hfs_path?(data, match, ignorable?)\n else\n cover(false)\n end\n end\n\n defp match_mac_hfs_path?([], [], _ignorable?), do: cover(true)\n\n # defp match_mac_hfs_path?([], [], ignorable?), do: ignorable?\n # TO DO: Understand what jgit was trying to accomplish with ignorable.\n # https://github.com/elixir-git/xgit/issues/34\n\n defp match_mac_hfs_path?(_data, _match, _ignorable?), do: cover(false)\n\n defp to_lower(b) when b >= ?A and b <= ?Z, do: cover(b + 32)\n defp to_lower(b), do: cover(b)\n\n @doc ~S\"\"\"\n Returns `true` if `path` starts with `prefix`.\n\n Unlike `String.starts_with?/2`, only accepts a single prefix path.\n \"\"\"\n @spec starts_with?(path :: t, prefix :: t) :: boolean\n def starts_with?(path, prefix)\n\n def starts_with?(_path, []), do: cover(true)\n def starts_with?([c | path], [c | prefix]), do: starts_with?(path, prefix)\n def starts_with?(_path, _prefix), do: cover(false)\n\n @doc ~S\"\"\"\n Ensure that a trailing `/` is present.\n\n **Exception:** If the path is empty, it will be returned as-is.\n \"\"\"\n @spec ensure_trailing_separator(path :: t) :: t\n def ensure_trailing_separator([]), do: cover([])\n\n def ensure_trailing_separator(path) when is_list(path) do\n # We strip trailing `/` because there might be more than one.\n strip_trailing_separator(path) ++ '/'\n end\n\n @doc ~S\"\"\"\n Remove trailing `/` if present.\n \"\"\"\n @spec strip_trailing_separator(path :: t) :: t\n def strip_trailing_separator([]), do: cover([])\n\n def strip_trailing_separator(path) when is_list(path) do\n if List.last(path) == ?/ do\n path\n |> Enum.reverse()\n |> Enum.drop_while(&(&1 == ?/))\n |> Enum.reverse()\n else\n cover path\n end\n end\n\n @doc ~S\"\"\"\n Compare two paths according to git path sort ordering rules.\n\n ## Return Value\n\n * `:lt` if `path1` sorts before `path2`.\n * `:eq` if they are the same.\n * `:gt` if `path1` sorts after `path2`.\n \"\"\"\n @spec compare(\n path1 :: t,\n mode1 :: FileMode.t(),\n path2 :: t,\n mode2 :: FileMode.t()\n ) :: Comparison.result()\n def compare(path1, mode1, path2, mode2)\n when is_list(path1) and is_file_mode(mode1) and is_list(path2) and is_file_mode(mode2) do\n case core_compare(path1, mode1, path2, mode2) do\n :eq -> mode_compare(mode1, mode2)\n x -> cover x\n end\n end\n\n @doc ~S\"\"\"\n Compare two paths, checking for identical name.\n\n Unlike `compare/4`, this method returns `:eq` when the paths have\n the same characters in their names, even if the mode differs. It is\n intended for use in validation routines detecting duplicate entries.\n\n ## Parameters\n\n `mode2` is the mode of the second file. Trees are sorted as though\n `List.last(path2) == ?/`, even if no such character exists.\n\n ## Return Value\n\n Returns `:eq` if the names are identical and a conflict exists\n between `path1` and `path2`, as they share the same name.\n\n Returns `:lt` if all possible occurrences of `path1` sort\n before `path2` and no conflict can happen. In a properly sorted\n tree there are no other occurrences of `path1` and therefore there\n are no duplicate names.\n\n Returns `:gt` when it is possible for a duplicate occurrence of\n `path1` to appear later, after `path2`. 
Callers should\n continue to examine candidates for `path2` until the method returns\n one of the other return values.\n \"\"\"\n @spec compare_same_name(path1 :: t, path2 :: t, mode2 :: FileMode.t()) ::\n Comparison.result()\n def compare_same_name(path1, path2, mode2),\n do: core_compare(path1, FileMode.tree(), path2, mode2)\n\n defp core_compare(path1, mode1, path2, mode2)\n\n defp core_compare([c | rem1], mode1, [c | rem2], mode2),\n do: core_compare(rem1, mode1, rem2, mode2)\n\n defp core_compare([c1 | _rem1], _mode1, [c2 | _rem2], _mode2),\n do: compare_chars(c1, c2)\n\n defp core_compare([c1 | _rem1], _mode1, [], mode2),\n do: compare_chars(band(c1, 0xFF), last_path_char(mode2))\n\n defp core_compare([], mode1, [c2 | _], _mode2),\n do: compare_chars(last_path_char(mode1), band(c2, 0xFF))\n\n defp core_compare([], _mode1, [], _mode2), do: cover(:eq)\n\n defp compare_chars(c, c), do: cover(:eq)\n defp compare_chars(c1, c2) when c1 < c2, do: cover(:lt)\n defp compare_chars(_, _), do: cover(:gt)\n\n defp last_path_char(mode) do\n if FileMode.tree?(mode) do\n cover ?/\n else\n cover 0\n end\n end\n\n defp mode_compare(mode1, mode2) do\n if FileMode.gitlink?(mode1) or FileMode.gitlink?(mode2) do\n cover :eq\n else\n compare_chars(last_path_char(mode1), last_path_char(mode2))\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,590,1,1,null,null,589,null,null,null,null,589,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,3,58,null,null,58,null,null,58,null,58,null,223,142,142,null,58,null,null,null,40,40,null,null,null,20,20,null,null,82,null,null,null,null,null,4123,null,null,null,null,621,null,null,null,3504,2,2,2,null,3502,null,null,null,619,null,null,4118,null,null,4110,null,null,null,null,null,null,4118,null,4118,null,null,4115,null,4113,3599,256,256,2,null,null,4111,null,null,null,null,24516,null,null,836,836,836,836,834,null,834,null,833,1,null,null,833,null,null,null,3282,null,1,3308,2,1,1,835,null,null,95,93,null,null,741,null,null,4115,null,4115,1540,null,2575,2573,null,null,null,155,30298,1,1,3027,null,null,2575,null,2575,2513,null,null,null,null,null,2511,null,62,null,null,null,null,95,93,null,null,null,1,null,null,null,1,null,null,null,2,null,93,1356,null,2513,null,null,167,null,167,1,52,1,113,null,null,null,null,11,null,2457,null,null,2615,2615,null,null,null,1,null,null,1,null,null,1,null,null,1,null,null,1,null,null,1,null,null,9,null,1,null,null,26,null,1540,null,null,1,1,null,null,null,2570,null,null,3,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,17,1,null,null,null,16,null,null,null,null,null,null,16,15,null,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,15,15,null,null,15,15,null,15,null,null,null,15,null,null,2,null,null,null,18,null,303,null,null,18,18,null,null,null,null,null,15,null,null,null,null,null,null,null,15,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,13,null,null,null,null,null,null,null,null,null,null,15,181,null,15,15,null,15,null,null,13,null,null,null,null,null,null,null,null,null,127,null,null,null,null,null,null,null,54,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,11,83,null,11,11,null,11,null,11,null,null,3,null,8,4,null,4,2,null,null,null,null,2,2,null,null,9,null,null,null,null,9,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,4,4,null,4,null,null,null,4,null,null,null,null,null,null,null,null,null,null,16,null,null,null,null,null,null,null,15,null,67,null,null,null,null,null,null,9,null,null,2,null,null,2,null,null,null,null,2,null,2,null,null,null,null,null,null,null,null,null,13,null,null,null,null,13,null,13,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,7,null,null,2,2,null,null,null,5,null,null,5,null,null,93,5,null,null,null,null,null,null,2,null,null,null,null,null,null,5,null,null,null,null,null,3,null,null,25,null,null,17,null,41,null,null,58,null,15,null,7,null,null,1,1,null,null],"name":"lib/xgit/config_file.ex","source":"defmodule Xgit.ConfigFile do\n @moduledoc ~S\"\"\"\n This GenServer monitors and potentially updates the contents\n of an on-disk git config file.\n\n See https://git-scm.com/docs/git-config for details on the config file format.\n \"\"\"\n\n use GenServer\n\n require Logger\n\n alias Xgit.ConfigEntry\n alias Xgit.Util.ObservedFile\n alias Xgit.Util.ParseCharlist\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n Process ID for an `Xgit.ConfigFile` process.\n \"\"\"\n @type t :: pid\n\n defmodule Line do\n @moduledoc false\n\n # Wraps the public Xgit.ConfigEntry with some additional infrastructure\n # that lets us reconstruct the exact contents of the file.\n\n defstruct [:entry, :original, :section, :subsection]\n end\n\n @doc ~S\"\"\"\n Start a `ConfigFile` for a config file at the given path.\n\n The path (including parent directory) needs to exist, but there\n need not be a file at this path.\n \"\"\"\n @spec start_link(path :: Path.t()) :: GenServer.on_start()\n def start_link(path) when is_binary(path) do\n unless File.dir?(Path.dirname(path)) do\n raise ArgumentError,\n \"Xgit.ConfigFile.start_link/1: Parent of path #{path} must be an existing directory\"\n end\n\n GenServer.start_link(__MODULE__, path)\n end\n\n @impl true\n def init(path) when is_binary(path) do\n cover {:ok,\n ObservedFile.initial_state_for_path(path, &parse_config_at_path/1, &empty_config/0)}\n end\n\n @doc ~S\"\"\"\n Return any configuration entries that match the requested search.\n\n Entries will be returned in the order in which they appeared in the underlying file.\n\n ## Options\n\n * `section:` (`String`) if provided, only returns entries in the named section\n * `subsection:` (`String`) if provided, only returns entries in the named subsection\n (only meaningful if `section` is also provided)\n * `name:` (`String`) if provided, only returns entries with the given variable name\n (only meaningful if `section` is also provided)\n\n If `section` is provided but `subsection` is not, then only items within the top-level\n section (i.e. 
with no subsection) will be matched.\n\n If no options are provided, returns all entries.\n\n A list of `Xgit.ConfigEntry` structs that match the search parameters.\n \"\"\"\n @spec get_entries(config_file :: t,\n section: String.t(),\n subsection: String.t(),\n name: String.t()\n ) :: [Xgit.ConfigEntry.t()]\n def get_entries(config_file, opts \\\\ []) when is_pid(config_file) and is_list(opts),\n do: GenServer.call(config_file, {:get_entries, opts})\n\n defp handle_get_entries(%ObservedFile{} = of, opts) when is_list(opts) do\n %{parsed_state: lines} =\n of = ObservedFile.update_state_if_maybe_dirty(of, &parse_config_at_path/1, &empty_config/0)\n\n opts = Enum.into(opts, %{})\n\n entries =\n lines\n |> Enum.filter(&(&1.entry != nil))\n |> Enum.map(& &1.entry)\n |> Enum.filter(&matches_opts?(&1, opts))\n\n {:reply, entries, of}\n end\n\n defp matches_opts?(item, %{section: section, name: name} = opts) do\n subsection = Map.get(opts, :subsection)\n item.section == section && item.subsection == subsection && item.name == name\n end\n\n defp matches_opts?(item, %{section: section} = opts) do\n subsection = Map.get(opts, :subsection)\n item.section == section && item.subsection == subsection\n end\n\n defp matches_opts?(_item, _opts), do: cover(true)\n\n defp parse_config_at_path(path) do\n path\n |> File.stream!()\n |> Enum.to_list()\n |> Enum.map(&String.replace_suffix(&1, \"\\n\", \"\"))\n |> Enum.reduce([], &join_backslashed_lines/2)\n |> Enum.reverse()\n |> Enum.reduce({[], nil, nil}, &text_to_line/2)\n |> elem(0)\n |> Enum.reverse()\n end\n\n defp join_backslashed_lines(line, [most_recent_line | tail] = reversed_lines) do\n if String.ends_with?(most_recent_line, \"\\\\\") and\n not String.ends_with?(most_recent_line, \"\\\\\\\\\") do\n most_recent_line = String.replace_suffix(most_recent_line, \"\\\\\", \"\")\n cover [\"#{most_recent_line}\\n#{line}\" | tail]\n else\n cover [line | reversed_lines]\n end\n end\n\n defp join_backslashed_lines(line, reversed_lines), do: cover([line | reversed_lines])\n\n defp text_to_line(line, {reversed_lines, section, subsection}) do\n {section, subsection, entry} =\n charlist_to_entry(String.to_charlist(line), section, subsection)\n\n {[\n %__MODULE__.Line{entry: entry, original: line, section: section, subsection: subsection}\n | reversed_lines\n ], section, subsection}\n end\n\n defp charlist_to_entry(line, section, subsection) do\n remainder = Enum.drop_while(line, &whitespace?/1)\n\n {section, subsection, remainder} =\n read_optional_section_header(remainder, section, subsection)\n\n {var_name, value, remainder} = read_optional_variable(remainder)\n\n case Enum.drop_while(remainder, &whitespace?/1) do\n [] -> cover :ok\n [?# | _] -> cover :ok\n [?; | _] -> cover :ok\n _ -> raise ArgumentError, \"Illegal variable declaration: #{line}\"\n end\n\n {section, subsection, maybe_config_entry(section, subsection, var_name, value)}\n end\n\n defp whitespace?(?\\s), do: cover(true)\n defp whitespace?(?\\t), do: cover(true)\n defp whitespace?(_), do: cover(false)\n\n defp read_optional_section_header([?[ | remainder] = line, _section, _subsection) do\n remainder = Enum.drop_while(remainder, &whitespace?/1)\n {section, remainder} = Enum.split_while(remainder, §ion_name_char?/1)\n remainder = Enum.drop_while(remainder, &whitespace?/1)\n {subsection, remainder} = read_optional_subsection_header(remainder)\n remainder = Enum.drop_while(remainder, &whitespace?/1)\n\n remainder =\n case remainder do\n [?] 
| x] -> Enum.drop_while(x, &whitespace?/1)\n _ -> raise ArgumentError, \"Illegal section header #{line}\"\n end\n\n {section |> to_string() |> String.downcase(), subsection, remainder}\n end\n\n defp read_optional_section_header(remainder, section, subsection),\n do: cover({section, subsection, remainder})\n\n defp section_name_char?(c) when c >= ?A and c <= ?Z, do: cover(true)\n defp section_name_char?(c) when c >= ?a and c <= ?z, do: cover(true)\n defp section_name_char?(c) when c >= ?0 and c <= ?9, do: cover(true)\n defp section_name_char?(?-), do: cover(true)\n defp section_name_char?(?.), do: cover(true)\n defp section_name_char?(_), do: cover(false)\n\n defp read_optional_subsection_header([?\" | _] = remainder) do\n {subsection, remainder} = read_quoted_string(remainder)\n {to_string(subsection), remainder}\n end\n\n defp read_optional_subsection_header(remainder), do: cover({nil, remainder})\n\n defp read_optional_variable(remainder) do\n {var_name, remainder} = Enum.split_while(remainder, &var_name_char?/1)\n\n if Enum.empty?(var_name) do\n cover {nil, nil, remainder}\n else\n {value, remainder} = read_optional_value(remainder)\n cover {var_name |> to_string() |> String.downcase(), value, remainder}\n end\n end\n\n defp var_name_char?(c) when c >= ?A and c <= ?Z, do: cover(true)\n defp var_name_char?(c) when c >= ?a and c <= ?z, do: cover(true)\n defp var_name_char?(c) when c >= ?0 and c <= ?9, do: cover(true)\n defp var_name_char?(?-), do: cover(true)\n defp var_name_char?(_), do: cover(false)\n\n defp read_optional_value(remainder) do\n remainder = Enum.drop_while(remainder, &whitespace?/1)\n\n if List.first(remainder) == ?= do\n {value, remainder} =\n remainder\n |> Enum.drop(1)\n |> Enum.drop_while(&whitespace?/1)\n |> read_possibly_quoted_string()\n\n cover {ParseCharlist.decode_ambiguous_charlist(value), remainder}\n else\n cover {nil, remainder}\n end\n end\n\n defp read_quoted_string([?\" | remainder]) do\n {quoted_string, remainder} = read_quoted_string([], remainder)\n cover {Enum.reverse(quoted_string), remainder}\n end\n\n defp read_quoted_string(_acc, [?\\n | _remainder]) do\n raise ArgumentError, \"Illegal quoted string: Can not span a new line\"\n end\n\n defp read_quoted_string(_acc, []) do\n raise ArgumentError, \"Illegal quoted string: Missing close quote\"\n end\n\n defp read_quoted_string(acc, [?\\\\ | [c | remainder]]),\n do: read_quoted_string([c | acc], remainder)\n\n defp read_quoted_string(acc, [?\" | remainder]), do: cover({acc, remainder})\n defp read_quoted_string(acc, [c | remainder]), do: read_quoted_string([c | acc], remainder)\n\n defp read_possibly_quoted_string(remainder), do: read_possibly_quoted_string([], remainder)\n\n defp read_possibly_quoted_string(acc, [c | _] = remainder) when c == ?\\s or c == ?\\t do\n {whitespace, remainder} = Enum.split_while(remainder, &whitespace?/1)\n\n case remainder do\n [] -> cover {acc, remainder}\n [?; | _] -> cover {acc, []}\n [?# | _] -> cover {acc, []}\n x -> read_possibly_quoted_string(acc ++ whitespace, x)\n end\n end\n\n defp read_possibly_quoted_string(acc, [?\" | remainder]),\n do: read_quoted_value_section(acc, remainder)\n\n defp read_possibly_quoted_string(acc, []), do: cover({acc, []})\n\n defp read_possibly_quoted_string(acc, remainder) do\n {non_whitespace, remainder} = Enum.split_while(remainder, &(!whitespace?(&1)))\n read_possibly_quoted_string(acc ++ non_whitespace, remainder)\n end\n\n defp read_quoted_value_section(acc, [?\\\\ | [?\" | remainder]]),\n do: read_quoted_value_section(acc ++ 
[?\"], remainder)\n\n defp read_quoted_value_section(acc, [?\\\\ | [?\\\\ | remainder]]),\n do: read_quoted_value_section(acc ++ [?\\\\], remainder)\n\n defp read_quoted_value_section(acc, [?\\\\ | [?n | remainder]]),\n do: read_quoted_value_section(acc ++ [?\\n], remainder)\n\n defp read_quoted_value_section(acc, [?\\\\ | [?t | remainder]]),\n do: read_quoted_value_section(acc ++ [?\\t], remainder)\n\n defp read_quoted_value_section(acc, [?\\\\ | [?b | remainder]]),\n do: read_quoted_value_section(acc ++ [8], remainder)\n\n defp read_quoted_value_section(_acc, [?\\\\ | [c | _remainder]]),\n do: raise(ArgumentError, \"Invalid config file: Unknown escape sequence \\\\#{[c]}\")\n\n defp read_quoted_value_section(acc, [?\" | remainder]),\n do: read_possibly_quoted_string(acc, remainder)\n\n defp read_quoted_value_section(_acc, []), do: raise(ArgumentError, \"Incomplete quoted string\")\n\n defp read_quoted_value_section(acc, [c | remainder]),\n do: read_quoted_value_section(acc ++ [c], remainder)\n\n defp maybe_config_entry(_section, _subsection, nil = _var_name, _value), do: cover(nil)\n\n defp maybe_config_entry(nil = _section, _subsection, var_name, _value) do\n raise ArgumentError,\n \"Invalid config file: Assigning variable #{var_name} without a section header\"\n end\n\n defp maybe_config_entry(section, subsection, var_name, value) when is_binary(section) do\n cover(%ConfigEntry{section: section, subsection: subsection, name: var_name, value: value})\n end\n\n defp empty_config, do: cover([])\n\n @doc ~S\"\"\"\n Create or update a config value.\n\n ## Parameters\n\n `value` (`nil` or `String`) value to be added to this variable\n\n ## Options\n\n * `section`: **required** section to add the value to\n * `subsection`: _(optional)_ subsection to add the value to\n * `name`: **required** name of variable to update or replace\n * `add?`: if `true`, adds this value to any that may already exist\n * `replace_all?`: if `true`, removes all existing entries that match any keys provided\n\n ## Return Values\n\n `:ok` if successful.\n\n `{:error, :replacing_multivar}` if the existing variable has multiple variables.\n Replacing such a variable requires either `add?: true` or `replace_all?: true`.\n \"\"\"\n @spec update(config_file :: t, value :: nil | String.t(),\n section: String.t(),\n subsection: String.t(),\n name: String.t(),\n add?: boolean,\n replace_all?: boolean\n ) :: :ok | {:error, :replacing_multivar}\n def update(config_file, value, opts)\n when is_pid(config_file) and (is_nil(value) or is_binary(value)) and\n is_list(opts) do\n if Keyword.get(opts, :add?) && Keyword.get(opts, :replace_all?) do\n raise ArgumentError,\n \"Xgit.ConfigFile.update/3: add? and replace_all? 
can not both be true\"\n end\n\n entries = value_and_opts_to_entries(value, opts)\n\n # NOTE: Some of the implementation here carries over from an earlier\n # implementation which allowed multiple variables to be updated at once.\n # I've since removed that interface, but the implementation could still\n # support that.\n\n if Enum.all?(entries, &ConfigEntry.valid?/1) do\n GenServer.call(config_file, {:update, entries, opts})\n else\n raise ArgumentError,\n \"Xgit.ConfigFile.update/3: one or more entries are invalid\"\n end\n end\n\n defp value_and_opts_to_entries(value, opts) do\n [\n %ConfigEntry{\n section: Keyword.get(opts, :section),\n subsection: Keyword.get(opts, :subsection),\n name: Keyword.get(opts, :name),\n value: value\n }\n ]\n end\n\n defp handle_update(%ObservedFile{path: path} = of, entries, opts) do\n %{parsed_state: lines} =\n of = ObservedFile.update_state_if_maybe_dirty(of, &parse_config_at_path/1, &empty_config/0)\n\n add? = Keyword.get(opts, :add?, false)\n replace_all? = Keyword.get(opts, :replace_all?, false)\n\n namespaces = namespaces_from_entries(entries)\n\n lines\n |> new_config_lines([], entries, namespaces, add?, replace_all?)\n |> reply_write_new_lines(path, of)\n catch\n :throw, :replacing_multivar ->\n cover {:reply, {:error, :replacing_multivar}, of}\n end\n\n defp reply_write_new_lines(lines, path, of) do\n config_text =\n lines\n |> Enum.map(& &1.original)\n |> Enum.join(\"\\n\")\n\n File.write!(path, [config_text, \"\\n\"])\n cover {:reply, :ok, of}\n end\n\n defp namespaces_from_entries(entries) do\n entries\n |> Enum.map(&namespace_from_entry/1)\n |> MapSet.new()\n end\n\n defp namespace_from_entry(%ConfigEntry{\n section: section,\n subsection: subsection,\n name: name\n }) do\n {section, subsection, name}\n end\n\n defp new_config_lines(\n remaining_old_lines,\n new_lines_acc,\n entries_to_add,\n namespaces,\n add?,\n replace_all?\n )\n\n defp new_config_lines(\n remaining_old_lines,\n new_lines_acc,\n [] = _entries_to_add,\n _namespaces,\n _add?,\n _replace_all?\n ) do\n new_lines_acc ++ remaining_old_lines\n end\n\n defp new_config_lines(\n remaining_old_lines,\n new_lines_acc,\n entries_to_add,\n namespaces,\n add?,\n replace_all?\n ) do\n {before_match, match_and_after} =\n Enum.split_while(remaining_old_lines, &(!matches_any_namespace?(&1, namespaces)))\n\n existing_lines = new_lines_acc ++ before_match\n last_existing_line = List.last(existing_lines)\n\n {new_lines, remaining_old_lines, entries_to_add} =\n new_lines(match_and_after, entries_to_add, last_existing_line, add?, replace_all?)\n\n new_config_lines(\n remaining_old_lines,\n existing_lines ++ new_lines,\n entries_to_add,\n namespaces,\n add?,\n replace_all?\n )\n end\n\n defp matches_any_namespace?(%__MODULE__.Line{entry: nil}, _namespaces), do: cover(false)\n\n defp matches_any_namespace?(\n %__MODULE__.Line{\n entry: %ConfigEntry{section: section, subsection: subsection, name: name}\n },\n namespaces\n ) do\n MapSet.member?(namespaces, {section, subsection, name})\n end\n\n defp new_lines(match_and_after, entries_to_add, last_existing_line, add?, replace_all?)\n\n defp new_lines(\n [\n %__MODULE__.Line{\n entry: %{\n section: section,\n subsection: subsection,\n name: name\n }\n }\n | _\n ] = match_and_after,\n entries_to_add,\n last_existing_line,\n add?,\n replace_all?\n ) do\n {replacing_lines, remaining_lines} =\n Enum.split_with(match_and_after, &matches_namespace?(&1, section, subsection, name))\n\n {matching_entries_to_add, other_entries_to_add} =\n 
Enum.split_with(entries_to_add, &matches_namespace?(&1, section, subsection, name))\n\n replacing_multivar? = Enum.count(replacing_lines) > 1\n\n existing_matches_to_keep =\n cond do\n replace_all? ->\n cover []\n\n add? ->\n cover replacing_lines\n\n replacing_multivar? ->\n throw(:replacing_multivar)\n\n # Yes, this is flow control via exception.\n # Not sure there is a clean way to avoid this.\n\n true ->\n cover []\n end\n\n new_lines =\n maybe_insert_subsection(last_existing_line, section, subsection) ++\n existing_matches_to_keep ++\n Enum.map(matching_entries_to_add, &entry_to_line/1)\n\n {new_lines, remaining_lines, other_entries_to_add}\n end\n\n defp new_lines(\n [] = _match_and_after,\n [\n %ConfigEntry{\n section: section,\n subsection: subsection,\n name: name\n }\n | _\n ] = entries_to_add,\n last_existing_line,\n _add?,\n _replace_all?\n ) do\n {matching_entries_to_add, other_entries_to_add} =\n Enum.split_with(entries_to_add, &matches_namespace?(&1, section, subsection, name))\n\n new_lines =\n maybe_insert_subsection(last_existing_line, section, subsection) ++\n Enum.map(matching_entries_to_add, &entry_to_line/1)\n\n cover {new_lines, [], other_entries_to_add}\n end\n\n defp matches_namespace?(\n %__MODULE__.Line{\n entry: %ConfigEntry{section: section, subsection: subsection, name: name}\n },\n section,\n subsection,\n name\n ),\n do: cover(true)\n\n defp matches_namespace?(\n %ConfigEntry{section: section, subsection: subsection, name: name},\n section,\n subsection,\n name\n ),\n do: cover(true)\n\n defp matches_namespace?(_line, _section, _subsection, _name), do: cover(false)\n\n defp maybe_insert_subsection(\n %__MODULE__.Line{section: section, subsection: subsection},\n section,\n subsection\n ),\n do: cover([])\n\n defp maybe_insert_subsection(_line, section, nil),\n do: cover([%__MODULE__.Line{original: \"[#{section}]\", section: section}])\n\n defp maybe_insert_subsection(_line, section, subsection) do\n escaped_subsection =\n subsection\n |> String.replace(\"\\\\\", \"\\\\\\\\\")\n |> String.replace(~S(\"), ~S(\\\"))\n\n cover([\n %__MODULE__.Line{\n original: ~s([#{section} \"#{escaped_subsection}\"]),\n section: section,\n subsection: subsection\n }\n ])\n end\n\n defp entry_to_line(\n %ConfigEntry{section: section, subsection: subsection, name: name, value: value} = entry\n ) do\n escaped_value =\n value\n |> String.replace(\"\\\\\", \"\\\\\\\\\")\n |> String.replace(~S(\"), ~S(\\\"))\n\n cover %__MODULE__.Line{\n entry: entry,\n original: \"\\t#{name} = #{escaped_value}\",\n section: section,\n subsection: subsection\n }\n end\n\n @typedoc ~S\"\"\"\n Error codes that can be returned by `remove_entries/2`.\n \"\"\"\n @type remove_entries_reason :: File.posix()\n\n @doc ~S\"\"\"\n Removes all configuration entries that match the requested search.\n\n ## Options\n\n * `section:` (`String`) if provided, only removes entries in the named section\n * `subsection:` (`String`) if provided, only removes entries in the named subsection\n (only meaningful if `section` is also provided)\n * `name:` (`String`) if provided, only removes entries with the given variable name\n (only meaningful if `section` is also provided)\n\n If `section` is provided but `subsection` is not, then only items within the top-level\n section (i.e. 
with no subsection) will be removed.\n\n If no options are provided, removes all entries.\n\n Returns `:ok` regardless of whether any matching entries were found and removed.\n \"\"\"\n @spec remove_entries(config_file :: t,\n section: String.t(),\n subsection: String.t(),\n name: String.t()\n ) ::\n :ok | {:error, reason :: remove_entries_reason}\n def remove_entries(config_file, opts \\\\ []) when is_pid(config_file) and is_list(opts),\n do: GenServer.call(config_file, {:remove_entries, opts})\n\n defp handle_remove_entries(%ObservedFile{path: path} = of, []) do\n result = File.write(path, \"\")\n cover {:reply, result, of}\n end\n\n defp handle_remove_entries(%ObservedFile{path: path} = of, opts) when is_list(opts) do\n %{parsed_state: lines} =\n of = ObservedFile.update_state_if_maybe_dirty(of, &parse_config_at_path/1, &empty_config/0)\n\n opts = Enum.into(opts, %{})\n\n lines\n |> Enum.reject(&line_matches_opts?(&1, opts))\n |> reply_write_new_lines(path, of)\n end\n\n defp line_matches_opts?(\n %__MODULE__.Line{section: section, entry: %{name: name}} = line,\n %{section: section, name: name} = opts\n ),\n do: line.subsection == Map.get(opts, :subsection)\n\n defp line_matches_opts?(\n %__MODULE__.Line{section: section, entry: %{name: name1}} = _line,\n %{section: section, name: name2} = _opts\n )\n when is_binary(name1) and is_binary(name2),\n do: cover(false)\n\n defp line_matches_opts?(\n %__MODULE__.Line{section: section, subsection: subsection} = line,\n %{section: section, subsection: subsection} = opts\n ),\n do: line.subsection == Map.get(opts, :subsection)\n\n defp line_matches_opts?(_line, %{section: _section, name: name}) when not is_nil(name),\n do: cover(false)\n\n defp line_matches_opts?(%__MODULE__.Line{section: section} = line, %{section: section} = opts),\n do: line.subsection == Map.get(opts, :subsection)\n\n defp line_matches_opts?(_line, _opts), do: cover(false)\n\n @impl true\n def handle_call({:get_entries, opts}, _from, state), do: handle_get_entries(state, opts)\n\n def handle_call({:update, entries, opts}, _from, state), do: handle_update(state, entries, opts)\n\n def handle_call({:remove_entries, opts}, _from, state), do: handle_remove_entries(state, opts)\n\n def handle_call(message, _from, state) do\n Logger.warn(\"ConfigFile received unrecognized call #{inspect(message)}\")\n {:reply, {:error, :unknown_message}, state}\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,322,null,null,262,null,null,null,null,17,null,null,14,null],"name":"lib/xgit/content_source.ex","source":"defprotocol Xgit.ContentSource do\n @moduledoc ~S\"\"\"\n Protocol used for reading object content from various sources.\n \"\"\"\n\n @typedoc ~S\"\"\"\n Any value for which `ContentSource` protocol is implemented.\n \"\"\"\n @type t :: term\n\n @doc ~S\"\"\"\n Calculate the length (in bytes) of the content.\n \"\"\"\n @spec length(content :: t) :: non_neg_integer\n def length(content)\n\n @doc ~S\"\"\"\n Return a stream which can be used for reading the content.\n \"\"\"\n @spec stream(content :: t) :: Enumerable.t()\n def stream(content)\nend\n\ndefimpl Xgit.ContentSource, for: List do\n @impl true\n def length(list), do: Enum.count(list)\n\n @impl true\n def stream(list), do: list\nend\n\ndefimpl Xgit.ContentSource, for: BitString do\n @impl true\n def length(s), do: byte_size(s)\n\n @impl true\n def stream(s), do: 
:binary.bin_to_list(s)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,149,null,null,null,6,null,null,null,189,null,null,null,6,null,null,null,10,null,null,null,null,28,65,null,null,null,null,1,1,null,null,null,null,1,1,null,null,null,null,1,1,null,null,null,null,3,12,null,null,null,null,null,23,2,197,9,2,7,null,null,null,null,null,null,null,null,null,null,null,null,null,42,1,52,11,1,null,null,null,null,1,null,null,null,null,null,null,null,null],"name":"lib/xgit/file_mode.ex","source":"defmodule Xgit.FileMode do\n @moduledoc ~S\"\"\"\n Describes the file type as represented on disk.\n \"\"\"\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n An integer describing the file type as represented on disk.\n\n Git uses a variation on the Unix file permissions flags to denote a file's\n intended type on disk. The following values are recognized:\n\n * `0o100644` - normal file\n * `0o100755` - executable file\n * `0o120000` - symbolic link\n * `0o040000` - tree (subdirectory)\n * `0o160000` - submodule (aka gitlink)\n\n This module is intended to be `use`d. Doing so will create an `alias` to the module\n so as to make `FileMode.t` available for typespecs and will `import` the\n `is_file_mode/1` guard.\n \"\"\"\n @type t :: 0o100644 | 0o100755 | 0o120000 | 0o040000 | 0o160000\n\n @doc \"Mode indicating an entry is a tree (aka directory).\"\n @spec tree :: t\n def tree, do: cover(0o040000)\n\n @doc \"Mode indicating an entry is a symbolic link.\"\n @spec symlink :: t\n def symlink, do: cover(0o120000)\n\n @doc \"Mode indicating an entry is a non-executable file.\"\n @spec regular_file :: t\n def regular_file, do: cover(0o100644)\n\n @doc \"Mode indicating an entry is an executable file.\"\n @spec executable_file :: t\n def executable_file, do: cover(0o100755)\n\n @doc \"Mode indicating an entry is a submodule commit in another repository.\"\n @spec gitlink :: t\n def gitlink, do: cover(0o160000)\n\n @doc \"Return `true` if the file mode represents a tree.\"\n @spec tree?(file_mode :: term) :: boolean\n def tree?(file_mode)\n def tree?(0o040000), do: cover(true)\n def tree?(_), do: cover(false)\n\n @doc \"Return `true` if the file mode a symbolic link.\"\n @spec symlink?(file_mode :: term) :: boolean\n def symlink?(file_mode)\n def symlink?(0o120000), do: cover(true)\n def symlink?(_), do: cover(false)\n\n @doc \"Return `true` if the file mode represents a regular file.\"\n @spec regular_file?(file_mode :: term) :: boolean\n def regular_file?(file_mode)\n def regular_file?(0o100644), do: cover(true)\n def regular_file?(_), do: cover(false)\n\n @doc \"Return `true` if the file mode represents an executable file.\"\n @spec executable_file?(file_mode :: term) :: boolean\n def executable_file?(file_mode)\n def executable_file?(0o100755), do: cover(true)\n def executable_file?(_), do: cover(false)\n\n @doc \"Return `true` if the file mode represents a submodule commit in another repository.\"\n @spec gitlink?(file_mode :: term) :: boolean\n def gitlink?(file_mode)\n def gitlink?(0o160000), do: cover(true)\n def gitlink?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Return `true` if the value is one of the known file mode values.\n \"\"\"\n @spec valid?(term) :: boolean\n def valid?(0o040000), do: cover(true)\n def valid?(0o120000), do: cover(true)\n def valid?(0o100644), do: cover(true)\n def valid?(0o100755), do: cover(true)\n def valid?(0o160000), do: cover(true)\n def valid?(_), do: 
cover(false)\n\n @valid_file_modes [0o100644, 0o100755, 0o120000, 0o040000, 0o160000]\n\n @doc ~S\"\"\"\n Return a rendered version of this file mode as an octal charlist.\n\n No leading zeros are included.\n\n Optimized for the known file modes. Errors out for any other mode.\n \"\"\"\n @spec to_short_octal(file_mode :: t) :: charlist\n def to_short_octal(file_mode)\n\n def to_short_octal(0o040000), do: cover('40000')\n def to_short_octal(0o120000), do: cover('120000')\n def to_short_octal(0o100644), do: cover('100644')\n def to_short_octal(0o100755), do: cover('100755')\n def to_short_octal(0o160000), do: cover('160000')\n\n @doc ~S\"\"\"\n This guard requires the value to be one of the known git file mode values.\n \"\"\"\n defguard is_file_mode(t) when t in @valid_file_modes\n\n defmacro __using__(opts) do\n quote location: :keep, bind_quoted: [opts: opts] do\n alias Xgit.FileMode\n import Xgit.FileMode, only: [is_file_mode: 1]\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,609,null,null,640,null,null,null,null,null,null,null,null,null,1,1,null,null,null,9,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,9,null,null,null,null,null,null,null,null,null,null,null,null,null,null,82,null,null,null,null,null,null,null,null,3,null,null,null,null,4,null,null,null,null,2,null,null,null,null,2,null,null,null,2,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,86,null,86,41,null,null,44,null,null,null],"name":"lib/xgit/util/observed_file.ex","source":"defmodule Xgit.Util.ObservedFile do\n @moduledoc false\n\n # Records the cached parsed state of the file and its modification date\n # so that Xgit can avoid the work of re-parsing that file when we can\n # be sure it is unchanged.\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n Cache for parsed state of the file and information about its\n file system state.\n\n ## Struct Members\n\n * `path`: path to the file\n * `exists?`: `true` if the file existed last time we checked\n * `last_modified_time`: POSIX file time for the file last time we checked\n (`nil` if file did not exist then)\n * `last_checked_time`: POSIX time stamp when file status was checked\n (used to help avoid the \"racy git problem\")\n * `parsed_state`: result from either `parse_fn` or `empty_fn`\n \"\"\"\n @type t :: %__MODULE__{\n path: Path.t(),\n exists?: boolean,\n last_modified_time: integer | nil,\n last_checked_time: integer | nil,\n parsed_state: any\n }\n\n @typedoc ~S\"\"\"\n A function that parses a file at a given path and returns a parsed state\n for that file.\n \"\"\"\n @type parse_fn :: (Path.t() -> any)\n\n @typedoc ~S\"\"\"\n A function that can return a state for the file format when the file\n doesn't exist.\n \"\"\"\n @type empty_fn :: (() -> any)\n\n @enforce_keys [:path, :exists?, :parsed_state]\n defstruct [\n :path,\n :exists?,\n :last_modified_time,\n :last_checked_time,\n :parsed_state\n ]\n\n @doc ~S\"\"\"\n Record an initial observation of the contents of the file.\n\n ## Parameters\n\n `parse_fn` is a function with one argument (path) that parses the file\n if it exists 
and returns the content that will be stored in `parsed_state`.\n\n `empty_fn` is a function with zero arguments that returns the desired state\n for `parsed_state` in the event there is no file at this path.\n \"\"\"\n @spec initial_state_for_path(path :: Path.t(), parse_fn :: parse_fn, empty_fn :: empty_fn) :: t\n def initial_state_for_path(path, parse_fn, empty_fn)\n when is_binary(path) and is_function(parse_fn, 1) and is_function(empty_fn, 0),\n do: state_from_file_stat(path, parse_fn, empty_fn, File.stat(path, time: :posix))\n\n defp state_from_file_stat(path, parse_fn, _empty_fn, {:ok, %{type: :regular, mtime: mtime}}) do\n %__MODULE__{\n path: path,\n exists?: true,\n last_modified_time: mtime,\n last_checked_time: System.os_time(:second),\n parsed_state: parse_fn.(path)\n }\n end\n\n defp state_from_file_stat(path, _parse_fn, _empty_fn, {:ok, %{type: file_type}}) do\n raise ArgumentError,\n \"Xgit.Util.ObservedFile: path #{path} points to an item of type #{file_type}; should be a regular file or no file at all\"\n end\n\n defp state_from_file_stat(path, _parse_fn, empty_fn, {:error, :enoent}) do\n %__MODULE__{\n path: path,\n exists?: false,\n parsed_state: empty_fn.()\n }\n end\n\n @doc ~S\"\"\"\n Return `true` if the file has potentially changed since the last\n recorded observation. This can happen if:\n\n * The modified time has changed since the previous observation.\n * The file exists when it did not previously exist (or vice versa).\n * The modified time is so recent as to be indistinguishable from\n the time at which the initial snapshot was recorded. (This is often\n referred to as the \"racy git problem.\")\n\n This function does not update the cached state of the file.\n \"\"\"\n @spec maybe_dirty?(observed_file :: t) :: boolean\n def maybe_dirty?(%__MODULE__{path: path} = observed_file) when is_binary(path),\n do: maybe_dirty_for_file_stat?(observed_file, File.stat(path, time: :posix))\n\n defp maybe_dirty_for_file_stat?(\n %__MODULE__{\n exists?: true,\n last_modified_time: last_modified_time,\n last_checked_time: last_checked_time\n },\n {:ok, %File.Stat{type: :regular, mtime: last_modified_time}}\n )\n when is_integer(last_modified_time) do\n # File still exists and modified time is same as before. 
Are we in racy git state?\n # Certain file systems round to the nearest few seconds, so last mod time has\n # to be at least 3 seconds before we checked status for us to start believing file content.\n\n last_modified_time >= last_checked_time - 2\n end\n\n defp maybe_dirty_for_file_stat?(\n %__MODULE__{exists?: true, last_modified_time: lmt1},\n {:ok, %File.Stat{type: :regular, mtime: lmt2}}\n )\n when is_integer(lmt1) and is_integer(lmt2) do\n # File still exists but modified time doesn't match: Dirty.\n cover true\n end\n\n defp maybe_dirty_for_file_stat?(%__MODULE__{exists?: false}, {:error, :enoent}) do\n # File didn't exist before; still doesn't: Not dirty.\n cover false\n end\n\n defp maybe_dirty_for_file_stat?(%__MODULE__{exists?: false}, {:ok, %File.Stat{type: :regular}}) do\n # File didn't exist before; it does now.\n cover true\n end\n\n defp maybe_dirty_for_file_stat?(%__MODULE__{exists?: true}, {:error, :enoent}) do\n # File existed before; now it doesn't.\n cover true\n end\n\n defp maybe_dirty_for_file_stat?(%__MODULE__{path: path}, {:ok, %{type: file_type}}) do\n raise ArgumentError,\n \"Xgit.Util.ObservedFile: path #{path} points to an item of type #{file_type}; should be a regular file or no file at all\"\n end\n\n @doc ~S\"\"\"\n Update the cached state of the file if it has potentially changed since the last\n observation.\n\n As noted in `maybe_dirty?/1`, we err on the side of caution if the modification date\n alone can not be trusted to reflect changes to the file's content.\n\n ## Parameters\n\n `parse_fn` is a function with one argument (path) that parses the file\n if it exists and returns the content that will be stored in `parsed_state`.\n\n `empty_fn` is a function with zero arguments that returns the desired state\n for `parsed_state` in the event there is no file at this path.\n\n If the file state has potentially changed (see `maybe_dirty?/1`) then either\n `parse_fn` or `empty_fn` will be called to generate a new value for `parsed_state`.\n\n ## Return Value\n\n Returns an `ObservedFile` struct which may have been updated via either `parse_fn/1`\n or `empty_fn/0` as appropriate.\n \"\"\"\n @spec update_state_if_maybe_dirty(\n observed_file :: t,\n parse_fn :: parse_fn,\n empty_fn :: empty_fn\n ) :: t\n def update_state_if_maybe_dirty(%__MODULE__{path: path} = observed_file, parse_fn, empty_fn)\n when is_binary(path) and is_function(parse_fn, 1) and is_function(empty_fn, 0) do\n file_stat = File.stat(path, time: :posix)\n\n if maybe_dirty_for_file_stat?(observed_file, file_stat) do\n state_from_file_stat(path, parse_fn, empty_fn, file_stat)\n else\n # We're sure the file is unchanged: Return cached state as is.\n cover observed_file\n end\n 
end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,532,null,532,531,null,null,null,null,531,null,530,null,530,null,null,null,530,null,null,1,null,null,null,null,null,null,null,null,530,530,null,null,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,21,null,null,21,17,4,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,4,null,null,4,3,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,59,null,null,59,56,56,56,56,null,3,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,16,15,null,null,null,16,null,16,1,null,null,null,15,null,null,null,null,null,null,null,15,null,15,null,12,10,null,2,3,null,null,null,null,15,null,null,null,null,null,null,null,null,null,null,null,null,null,null,27,27,27,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,12,null,12,null,null,null,null,null,null,null,null,null,null,null,null,19,null,null,null,null,null,null,null,null,null,19,null,null,13,18,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,28,26,null,null,null,28,null,28,1,null,null,null,27,null,27,1,null,null,null,null,null,null,null,null,null,null,null,26,24,22,null,20,18,16,null,6,2,2,null,null,null,null,null,26,null,null,null,null,11,9,null,9,null,null,null,null,null,null,16,null,null,38,34,2,2,null,null,null,null,106,72,71,71,null,71,null,34,1,null,null,null,null,3,72,69,69,null,null,null,null,null,null,null,528,null,21,null,4,null,null,59,null,null,15,null,null,26,null,null,1,1,null,null,null,2,null],"name":"lib/xgit/repository/working_tree.ex","source":"defmodule Xgit.Repository.WorkingTree do\n @moduledoc ~S\"\"\"\n A working tree is an on-disk manifestation of a commit or pending commit in\n a git repository.\n\n An `Xgit.Repository.Storage` may have a default working tree associated with it or\n it may not. 
(A repository without a working tree is often referred to as a\n \"bare\" repository.)\n\n More than one working tree may be associated with a repository, though this\n is not (currently) well-tested in Xgit.\n\n A working tree is itself strictly tied to a file system, but it need not be\n tied to an on-disk repository instance.\n\n _IMPORTANT NOTE:_ This is intended as a reference implementation largely\n for testing purposes and may not necessarily handle all of the edge cases that\n the traditional `git` command-line interface will handle.\n \"\"\"\n use GenServer\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.DirCache\n alias Xgit.DirCache.Entry, as: DirCacheEntry\n alias Xgit.FilePath\n alias Xgit.Object\n alias Xgit.ObjectId\n alias Xgit.Repository.Storage\n alias Xgit.Tree\n alias Xgit.Util.TrailingHashDevice\n\n require Logger\n\n @typedoc ~S\"\"\"\n The process ID for a `WorkingTree` process.\n \"\"\"\n @type t :: pid\n\n @doc \"\"\"\n Starts a `WorkingTree` process linked to the current process.\n\n ## Parameters\n\n `repository` is the associated `Xgit.Repository.Storage` process.\n\n `work_dir` is the root path for the working tree.\n\n `options` are passed to `GenServer.start_link/3`.\n\n ## Return Value\n\n See `GenServer.start_link/3`.\n\n If the process is unable to create the working directory root, the response\n will be `{:error, {:mkdir, :eexist}}` (or perhaps a different posix error code).\n \"\"\"\n @spec start_link(repository :: Storage.t(), work_dir :: Path.t(), GenServer.options()) ::\n GenServer.on_start()\n def start_link(repository, work_dir, options \\\\ [])\n when is_pid(repository) and is_binary(work_dir) and is_list(options) do\n Storage.assert_valid(repository)\n GenServer.start_link(__MODULE__, {repository, work_dir}, options)\n end\n\n @impl true\n def init({repository, work_dir}) do\n case File.mkdir_p(work_dir) do\n :ok ->\n index_path = Path.join([work_dir, \".git\", \"index\"])\n\n Process.monitor(repository)\n\n # Read index file here or maybe in a :continue handler?\n\n cover {:ok, %{repository: repository, work_dir: work_dir, index_path: index_path}}\n\n {:error, reason} ->\n cover {:stop, {:mkdir, reason}}\n end\n end\n\n @doc ~S\"\"\"\n Returns `true` if the argument is a PID representing a valid `WorkingTree` process.\n \"\"\"\n @spec valid?(working_tree :: term) :: boolean\n def valid?(working_tree) when is_pid(working_tree) do\n Process.alive?(working_tree) &&\n GenServer.call(working_tree, :valid_working_tree?) == :valid_working_tree\n end\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Returns a current snapshot of the working tree state.\n\n ## Return Value\n\n `{:ok, dir_cache}` if an index file exists and could be parsed as a dir cache file.\n\n `{:ok, dir_cache}` if no index file exists. (`dir_cache` will have zero entries.)\n\n `{:error, reason}` if the file exists but could not be parsed. (See\n `Xgit.DirCache.from_iodevice/1` for possible reason codes.\n\n ## TO DO\n\n Find index file in appropriate location (i.e. as potentially modified\n by `.git/config` file). [Issue #86](https://github.com/elixir-git/xgit/issues/86)\n\n Cache state of index file so we don't have to parse it for every\n call. [Issue #87](https://github.com/elixir-git/xgit/issues/87)\n\n Consider scalability of passing a potentially large `Xgit.DirCache` structure\n across process boundaries. 
[Issue #88](https://github.com/elixir-git/xgit/issues/88)\n \"\"\"\n @spec dir_cache(working_tree :: t) ::\n {:ok, DirCache.t()} | {:error, reason :: DirCache.from_iodevice_reason()}\n def dir_cache(working_tree) when is_pid(working_tree),\n do: GenServer.call(working_tree, :dir_cache)\n\n defp handle_dir_cache(%{index_path: index_path} = state) do\n case parse_index_file_if_exists(index_path) do\n {:ok, dir_cache} -> {:reply, {:ok, dir_cache}, state}\n {:error, reason} -> {:reply, {:error, reason}, state}\n end\n end\n\n @typedoc ~S\"\"\"\n Error code reasons returned by `reset_dir_cache/1`.\n \"\"\"\n @type reset_dir_cache_reason :: DirCache.to_iodevice_reason()\n\n @doc ~S\"\"\"\n Reset the dir cache to empty and rewrite the index file accordingly.\n\n ## Return Values\n\n `:ok` if successful.\n\n `{:error, reason}` if unable. The relevant reason codes may come from:\n\n * `Xgit.DirCache.to_iodevice/2`.\n \"\"\"\n @spec reset_dir_cache(working_tree :: t) ::\n :ok | {:error, reset_dir_cache_reason}\n def reset_dir_cache(working_tree) when is_pid(working_tree),\n do: GenServer.call(working_tree, :reset_dir_cache)\n\n defp handle_reset_dir_cache(%{index_path: index_path} = state) do\n case write_index_file(DirCache.empty(), index_path) do\n :ok -> cover {:reply, :ok, state}\n {:error, reason} -> cover {:reply, {:error, reason}, state}\n end\n end\n\n @typedoc ~S\"\"\"\n Error code reasons returned by `update_dir_cache/3`.\n \"\"\"\n @type update_dir_cache_reason ::\n DirCache.add_entries_reason()\n | DirCache.from_iodevice_reason()\n | DirCache.remove_entries_reason()\n | DirCache.to_iodevice_reason()\n\n @doc ~S\"\"\"\n Apply updates to the dir cache and rewrite the index tree accordingly.\n\n ## Parameters\n\n `add`: a list of `Xgit.DirCache.Entry` structs to add to the dir cache.\n In the event of collisions with existing entries, the existing entries will\n be replaced with the corresponding new entries.\n\n `remove`: a list of `{path, stage}` tuples to remove from the dir cache.\n `stage` must be `0..3` to remove a specific stage entry or `:all` to match\n any entry for the `path`.\n\n ## Return Values\n\n `{:ok, dir_cache}` where `dir_cache` is the original `dir_cache` with the new\n entries added (and properly sorted) and targeted entries removed.\n\n `{:error, reason}` if unable. The relevant reason codes may come from:\n\n * `Xgit.DirCache.add_entries/2`\n * `Xgit.DirCache.from_iodevice/1`\n * `Xgit.DirCache.remove_entries/2`\n * `Xgit.DirCache.to_iodevice/2`.\n\n ## TO DO\n\n Find index file in appropriate location (i.e. as potentially modified\n by `.git/config` file). [Issue #86](https://github.com/elixir-git/xgit/issues/86)\n\n Cache state of index file so we don't have to parse it for every\n call. 
[Issue #87](https://github.com/elixir-git/xgit/issues/87)\n \"\"\"\n @spec update_dir_cache(\n working_tree :: t,\n add :: [DirCacheEntry.t()],\n remove :: [{path :: FilePath.t(), stage :: DirCacheEntry.stage_match()}]\n ) ::\n {:ok, DirCache.t()} | {:error, update_dir_cache_reason}\n def update_dir_cache(working_tree, add, remove)\n when is_pid(working_tree) and is_list(add) and is_list(remove),\n do: GenServer.call(working_tree, {:update_dir_cache, add, remove})\n\n defp handle_update_dir_cache(add, remove, %{index_path: index_path} = state) do\n with {:ok, dir_cache} <- parse_index_file_if_exists(index_path),\n {:ok, dir_cache} <- DirCache.add_entries(dir_cache, add),\n {:ok, dir_cache} <- DirCache.remove_entries(dir_cache, remove),\n :ok <- write_index_file(dir_cache, index_path) do\n {:reply, :ok, state}\n else\n {:error, reason} -> {:reply, {:error, reason}, state}\n end\n end\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `read_tree/3`.\n \"\"\"\n @type read_tree_reason ::\n :objects_missing\n | DirCache.to_iodevice_reason()\n | Storage.get_object_reason()\n | Tree.from_object_reason()\n\n @doc ~S\"\"\"\n Read a `tree` object and any trees it may refer to and populate the dir cache accordingly.\n\n Does not update files in the working tree itself.\n\n Analogous to [`git read-tree`](https://git-scm.com/docs/git-read-tree).\n\n ## Parameters\n\n `object_id` is the object ID of the root working tree.\n\n ## Options\n\n `:missing_ok?`: `true` to ignore any objects that are referenced by the tree\n structures that are not present in the object database. Normally this would be an error.\n\n ## Return Value\n\n `:ok` if successful.\n\n `{:error, :objects_missing}` if any of the objects referenced by the index\n are not present in the object store. (Exception: If `missing_ok?` is `true`,\n then this condition will be ignored.)\n\n Reason codes may also come from the following functions:\n\n * `Xgit.DirCache.to_iodevice/2`\n * `Xgit.Repository.Storage.get_object/2`\n * `Xgit.Tree.from_object/1`\n\n ## TO DO\n\n Implement `--prefix` option. https://github.com/elixir-git/xgit/issues/175\n \"\"\"\n @spec read_tree(working_tree :: t, object_id :: ObjectId.t(), missing_ok?: boolean) ::\n :ok | {:error, reason :: read_tree_reason}\n def read_tree(working_tree, object_id, opts \\\\ [])\n when is_pid(working_tree) and is_binary(object_id) and is_list(opts) do\n missing_ok? = validate_read_tree_options(opts)\n GenServer.call(working_tree, {:read_tree, object_id, missing_ok?})\n end\n\n defp validate_read_tree_options(opts) do\n missing_ok? = Keyword.get(opts, :missing_ok?, false)\n\n unless is_boolean(missing_ok?) do\n raise ArgumentError,\n \"Xgit.Repository.WorkingTree.read_tree/3: missing_ok? 
#{inspect(missing_ok?)} is invalid\"\n end\n\n missing_ok?\n end\n\n defp handle_read_tree(\n object_id,\n missing_ok?,\n %{repository: repository, index_path: index_path} = state\n ) do\n with {:ok, %DirCache{entries: entries} = dir_cache} <-\n tree_to_dir_cache(repository, object_id),\n {:has_all_objects?, true} <-\n {:has_all_objects?, has_all_objects?(repository, entries, missing_ok?)},\n :ok <- write_index_file(dir_cache, index_path) do\n cover {:reply, :ok, state}\n else\n {:error, reason} -> cover {:reply, {:error, reason}, state}\n {:has_all_objects?, false} -> cover {:reply, {:error, :objects_missing}, state}\n end\n end\n\n defp tree_to_dir_cache(repository, object_id) do\n case tree_to_dir_cache_entries(repository, object_id, '', []) do\n {:ok, reversed_entries} ->\n {:ok,\n %DirCache{\n version: 2,\n entry_count: Enum.count(reversed_entries),\n entries: Enum.reverse(reversed_entries)\n }}\n\n {:error, reason} ->\n {:error, reason}\n end\n end\n\n defp tree_to_dir_cache_entries(repository, object_id, prefix, acc) do\n with {:ok, object} <- Storage.get_object(repository, object_id),\n {:ok, %Tree{entries: tree_entries} = _tree} <- Tree.from_object(object) do\n tree_entries_to_dir_cache_entries(repository, tree_entries, prefix, acc)\n # TO DO: A malformed tree could cause an infinite loop here.\n # https://github.com/elixir-git/xgit/issues/178\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n defp tree_entries_to_dir_cache_entries(repository, tree_entries, prefix, acc)\n\n defp tree_entries_to_dir_cache_entries(_repository, [], _prefix, acc), do: {:ok, acc}\n\n defp tree_entries_to_dir_cache_entries(\n repository,\n [%{mode: 0o040000, object_id: object_id, name: name} = _tree_entry | tail],\n prefix,\n acc\n ) do\n case tree_to_dir_cache_entries(repository, object_id, append_to_prefix(prefix, name), acc) do\n {:ok, acc} ->\n tree_entries_to_dir_cache_entries(repository, tail, prefix, acc)\n\n {:error, reason} ->\n {:error, reason}\n end\n end\n\n defp tree_entries_to_dir_cache_entries(\n repository,\n [%{mode: mode, object_id: object_id, name: name} = _tree_entry | tail],\n prefix,\n acc\n ) do\n dir_cache_entry = %DirCacheEntry{\n name: append_to_prefix(prefix, name),\n stage: 0,\n object_id: object_id,\n mode: mode,\n size: 0,\n ctime: 0,\n mtime: 0\n }\n\n tree_entries_to_dir_cache_entries(repository, tail, prefix, [dir_cache_entry | acc])\n end\n\n defp append_to_prefix('', name), do: name\n defp append_to_prefix(prefix, name), do: '#{prefix}/#{name}'\n\n @typedoc ~S\"\"\"\n Reason codes that can be returned by `write_tree/2`.\n \"\"\"\n @type write_tree_reason ::\n :incomplete_merge\n | :objects_missing\n | :prefix_not_found\n | DirCache.from_iodevice_reason()\n | DirCache.to_tree_objects_reason()\n | Storage.put_loose_object_reason()\n\n @doc ~S\"\"\"\n Translates the current dir cache, as reflected in its index file, to one or more\n tree objects.\n\n The working tree must be in a fully-merged state.\n\n ## Options\n\n `:missing_ok?`: `true` to ignore any objects that are referenced by the index\n file that are not present in the object database. Normally this would be an error.\n\n `:prefix`: (`Xgit.FilePath`) if present, returns the `object_id` for the tree at\n the given subdirectory. If not present, writes a tree corresponding to the root.\n (The entire tree is written in either case.)\n\n ## Return Value\n\n `{:ok, object_id}` with the object ID for the tree that was generated. 
(If the exact tree\n specified by the index already existed, it will return that existing tree's ID.)\n\n `{:error, :incomplete_merge}` if any entry in the index file is not fully merged.\n\n `{:error, :objects_missing}` if any of the objects referenced by the index\n are not present in the object store. (Exception: If `missing_ok?` is `true`,\n then this condition will be ignored.)\n\n `{:error, :prefix_not_found}` if `prefix` was specified, but that prefix is not referenced\n in the index file.\n\n Reason codes may also come from the following functions:\n\n * `Xgit.DirCache.from_iodevice/1`\n * `Xgit.DirCache.to_tree_objects/2`\n * `Xgit.Repository.Storage.put_loose_object/2`\n \"\"\"\n @spec write_tree(working_tree :: t, missing_ok?: boolean, prefix: FilePath.t()) ::\n {:ok, object_id :: ObjectId.t()} | {:error, reason :: write_tree_reason}\n def write_tree(working_tree, opts \\\\ []) when is_pid(working_tree) do\n {missing_ok?, prefix} = validate_write_tree_options(opts)\n GenServer.call(working_tree, {:write_tree, missing_ok?, prefix})\n end\n\n defp validate_write_tree_options(opts) do\n missing_ok? = Keyword.get(opts, :missing_ok?, false)\n\n unless is_boolean(missing_ok?) do\n raise ArgumentError,\n \"Xgit.Repository.WorkingTree.write_tree/2: missing_ok? #{inspect(missing_ok?)} is invalid\"\n end\n\n prefix = Keyword.get(opts, :prefix, [])\n\n unless prefix == [] or FilePath.valid?(prefix) do\n raise ArgumentError,\n \"Xgit.Repository.WorkingTree.write_tree/2: prefix #{inspect(prefix)} is invalid (should be a charlist, not a String)\"\n end\n\n {missing_ok?, prefix}\n end\n\n defp handle_write_tree(\n missing_ok?,\n prefix,\n %{repository: repository, index_path: index_path} = state\n ) do\n with {:ok, %DirCache{entries: entries} = dir_cache} <- parse_index_file_if_exists(index_path),\n {:merged?, true} <- {:merged?, DirCache.fully_merged?(dir_cache)},\n {:has_all_objects?, true} <-\n {:has_all_objects?, has_all_objects?(repository, entries, missing_ok?)},\n {:ok, objects, %Object{id: object_id}} <- DirCache.to_tree_objects(dir_cache, prefix),\n :ok <- write_all_objects(repository, objects) do\n cover {:reply, {:ok, object_id}, state}\n else\n {:error, reason} -> cover {:reply, {:error, reason}, state}\n {:merged?, false} -> cover {:reply, {:error, :incomplete_merge}, state}\n {:has_all_objects?, false} -> cover {:reply, {:error, :objects_missing}, state}\n end\n end\n\n defp has_all_objects?(repository, entries, missing_ok?)\n\n defp has_all_objects?(_repository, _entries, true), do: cover(true)\n\n defp has_all_objects?(repository, entries, false) do\n entries\n |> Enum.chunk_every(100)\n |> Enum.all?(fn entries_chunk ->\n Storage.has_all_object_ids?(\n repository,\n Enum.map(entries_chunk, fn %{object_id: id} -> id end)\n )\n end)\n end\n\n defp write_all_objects(repository, objects)\n\n defp write_all_objects(_repository, []), do: cover(:ok)\n\n defp write_all_objects(repository, [object | tail]) do\n case Storage.put_loose_object(repository, object) do\n :ok -> write_all_objects(repository, tail)\n {:error, :object_exists} -> write_all_objects(repository, tail)\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp parse_index_file_if_exists(index_path) do\n with true <- File.exists?(index_path),\n {:ok, iodevice} when is_pid(iodevice) <- TrailingHashDevice.open_file(index_path) do\n res = DirCache.from_iodevice(iodevice)\n :ok = File.close(iodevice)\n\n res\n else\n false -> cover {:ok, DirCache.empty()}\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n 
defp write_index_file(dir_cache, index_path) do\n with {:ok, iodevice}\n when is_pid(iodevice) <- TrailingHashDevice.open_file_for_write(index_path),\n :ok <- DirCache.to_iodevice(dir_cache, iodevice),\n :ok <- File.close(iodevice) do\n :ok\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n @impl true\n def handle_call(:valid_working_tree?, _from, state), do: {:reply, :valid_working_tree, state}\n\n def handle_call(:dir_cache, _from, state), do: handle_dir_cache(state)\n\n def handle_call(:reset_dir_cache, _from, state), do: handle_reset_dir_cache(state)\n\n def handle_call({:update_dir_cache, add, remove}, _from, state),\n do: handle_update_dir_cache(add, remove, state)\n\n def handle_call({:read_tree, object_id, missing_ok?}, _from, state),\n do: handle_read_tree(object_id, missing_ok?, state)\n\n def handle_call({:write_tree, missing_ok?, prefix}, _from, state),\n do: handle_write_tree(missing_ok?, prefix, state)\n\n def handle_call(message, _from, state) do\n Logger.warn(\"WorkingTree received unrecognized call #{inspect(message)}\")\n {:reply, {:error, :unknown_message}, state}\n end\n\n @impl true\n def handle_info({:DOWN, _ref, :process, _object, reason}, state), do: {:stop, reason, state}\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,599,null,null,null,null,null,null,null,null,null,1509,6,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,57,null,null,null,434,null,null,null,null,null,null,null,null,null,null,null,null,null,306,null,306,306,273,null,33,null,null,null,null,null,null,null,null,null,null,null,252,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,215,null,null,null,null,215,null,215,null,null,null,215,null,null,null,211,null,null,4,8,null,null,null],"name":"lib/xgit/object_id.ex","source":"defmodule Xgit.ObjectId do\n @moduledoc ~S\"\"\"\n An object ID is a string that identifies an object within a repository.\n\n This string must match the format for a SHA-1 hash (i.e. 40 characters\n of lowercase hex).\n \"\"\"\n use Xgit.ObjectType\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.ContentSource\n\n @typedoc \"A string containing 40 bytes of lowercase hex digits.\"\n @type t :: String.t()\n\n @doc ~S\"\"\"\n Get the special all-null object ID, often used to stand-in for no object.\n \"\"\"\n @spec zero :: t\n def zero, do: cover(\"0000000000000000000000000000000000000000\")\n\n @doc ~S\"\"\"\n Returns `true` if the value is a valid object ID.\n\n (In other words, is it a string containing 40 characters of lowercase hex?)\n \"\"\"\n @spec valid?(id :: term) :: boolean\n def valid?(id)\n\n def valid?(s) when is_binary(s), do: String.length(s) == 40 && String.match?(s, ~r/^[0-9a-f]+$/)\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Read an object ID from raw binary or bytelist.\n\n ## Parameters\n\n `raw_object_id` should be either a binary or list containing a raw object ID (not\n hex-encoded). It should be exactly 20 bytes.\n\n ## Return Value\n\n The object ID rendered as lowercase hex. 
(See `Xgit.ObjectId`.)\n \"\"\"\n @spec from_binary_iodata(b :: iodata) :: t\n def from_binary_iodata(b) when is_list(b) do\n b\n |> IO.iodata_to_binary()\n |> from_binary_iodata()\n end\n\n def from_binary_iodata(b) when is_binary(b) and byte_size(b) == 20,\n do: Base.encode16(b, case: :lower)\n\n @doc ~S\"\"\"\n Read an object ID from a hex string (charlist).\n\n ## Return Value\n\n If a valid ID is found, returns `{id, next}` where `id` is the matched ID\n as a string and `next` is the remainder of the charlist after the matched ID.\n\n If no such ID is found, returns `false`.\n \"\"\"\n @spec from_hex_charlist(b :: charlist) :: {t, charlist} | false\n def from_hex_charlist(b) when is_list(b) do\n {maybe_id, remainder} = Enum.split(b, 40)\n\n with maybe_id_string <- to_string(maybe_id),\n true <- valid?(maybe_id_string) do\n cover {maybe_id_string, remainder}\n else\n _ -> cover false\n end\n end\n\n @doc ~S\"\"\"\n Convert an object ID to raw binary representation.\n\n ## Return Value\n\n A 20-byte binary encoding the object ID.\n \"\"\"\n @spec to_binary_iodata(id :: t) :: binary\n def to_binary_iodata(id), do: Base.decode16!(id, case: :lower)\n\n @doc ~S\"\"\"\n Assign an object ID for a given data blob.\n\n No validation is performed on the content.\n\n ## Parameters\n\n * `data` describes how to read the data. (See `Xgit.ContentSource`.)\n * `type` is the intended git object type for this data. (See `Xgit.ObjectType`.)\n\n ## Return Value\n\n The object ID. (See `Xgit.ObjectId`.)\n \"\"\"\n @spec calculate_id(data :: ContentSource.t(), type :: ObjectType.t()) :: t()\n def calculate_id(data, type) when not is_nil(data) and is_object_type(type) do\n size = ContentSource.length(data)\n\n # Erlang/Elixir :sha == SHA-1\n :sha\n |> :crypto.hash_init()\n |> :crypto.hash_update('#{type}')\n |> :crypto.hash_update(' ')\n |> :crypto.hash_update('#{size}')\n |> :crypto.hash_update([0])\n |> hash_update(ContentSource.stream(data))\n |> :crypto.hash_final()\n |> from_binary_iodata()\n end\n\n defp hash_update(crypto_state, data) when is_list(data),\n do: :crypto.hash_update(crypto_state, data)\n\n defp hash_update(crypto_state, data) do\n Enum.reduce(data, crypto_state, fn item, crypto_state ->\n :crypto.hash_update(crypto_state, item)\n end)\n 
end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,498,null,null,18,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,195,null,null,null,null,null,302,282,20,17,7,4,null,null,null,null,null,null,null,null,51,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,4,251,246,null,null,3,null,null,2,423,425,422,null,null,243,null,null,null,null,null,null,27,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,62,61,60,60,66,59,null,59,null,null,1,1,1,null,null,null,null,null,5,54,null,null,null,null,null,64,61,1,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,65,64,null,63,null,63,null,null,1,1,null,null,null,2,null,null,null,8,null,1,null,null,null,10,53,null,null,null,null,null,2,null,null,null,null,5,null,null,5,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,null,27,26,26,null,23,null,null,52,null,23,null,1,3,null,null,null,null,null,null,31,null,null,null,null,null,null,null,136,100,null,100,285,41,null,59,null,null,null,100,null,null,null,36,null,null,null,null,67,67,null,null,null,null,null,null,null,null,41,null,null,41,82,null,41,41,null,41,null,41,null,null,41,null,41,null,null,null,41,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,109,108,100,null,98,null,96,null,86,85,84,null,null,null,null,null,null,1,8,2,1,1,10,1,1,null,null,null,null,108,100,8,null,null,null,5,null,null,91,null,167,null,null,91,81,null,10,null,null,null,null,167,165,165,165,165,165,165,165,164,164,null,164,null,161,160,157,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,10,null,null,null,157,10,null,null,95,null,null,null,null,9,null,null,1,null,null,null,null,9,9,null,9,9,null,null,9,9,null,null,null,1,1,null,1,1,null,null,null,null,null,null,161,null,null,null,null,160,null,null,1,null,null,null,null,1858,null,null,null,null,1854,null,null,4,null,null,null,null,164,162,2,null,null,null,null,159,null,159,null,null,null,157,null,null,2,null,null,null,null,null,286,3,19,null,307,7,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,85,84,83,82,82,null,81,null,1,1,1,1,null,null,null,null,82,null,81,null,null,149,148,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,149,null,149,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,149,null,null,null,null,null,149,null,null,8,290,null,null,149,149,null,null],"name":"lib/xgit/dir_cache.ex","source":"defmodule Xgit.DirCache do\n @moduledoc ~S\"\"\"\n A directory cache records the current (intended) contents of a working tree\n when last scanned or created by git.\n\n In Xgit, the `DirCache` structure is an abstract, in-memory data structure\n without any tie to a specific persistence mechanism. Persistence is implemented\n by a specific implementation of the `Xgit.Repository.Storage` behaviour.\n\n This content is commonly persisted on disk as an `index` file at the root of\n the git tree. This file format can be read using the `from_iodevice/1` function\n and written using `to_iodevice/1`. If a different persistence mechanism is\n required, you may provide that e elsewhere.\n\n Changes in the working tree can be detected by comparing the modification times\n to the cached modification time within the dir cache.\n\n Index files are also used during merges, where the merge happens within the\n index file first, and the working directory is updated as a post-merge step.\n Conflicts are stored in the index file to allow tool (and human) based\n resolutions to be easily performed.\n \"\"\"\n\n use Bitwise\n use Xgit.FileMode\n\n import Xgit.Util.ForceCoverage\n\n require Logger\n\n alias Xgit.FilePath\n alias Xgit.ObjectId\n alias Xgit.Tree\n alias Xgit.Util.Comparison\n alias Xgit.Util.NB\n alias Xgit.Util.TrailingHashDevice\n\n @typedoc ~S\"\"\"\n Version number for an index file.\n \"\"\"\n @type version :: 2..4\n\n @typedoc ~S\"\"\"\n This struct describes an entire working tree as understood by git.\n\n ## Struct Members\n\n * `:version`: the version number as read from disk (typically 2, 3, or 4)\n * `:entry_count`: the number of items in `entries`\n * `:entries`: a list of `Entry` structs in sorted order\n * `:extensions`: a list of `Extension` structs (not yet implemented)\n \"\"\"\n @type t :: %__MODULE__{\n version: version,\n entry_count: non_neg_integer,\n entries: [__MODULE__.Entry.t()]\n # extensions: [Extension.t()]\n }\n\n @enforce_keys [:version, :entry_count, :entries]\n defstruct [:version, :entry_count, :entries]\n\n defmodule Entry do\n @moduledoc ~S\"\"\"\n A single file (or stage of a file) in a directory cache.\n\n An entry represents exactly one stage of a file. If a file path is unmerged\n then multiple instances may appear for the same path name.\n \"\"\"\n\n use Xgit.FileMode\n\n alias Xgit.FileMode\n alias Xgit.FilePath\n alias Xgit.ObjectId\n\n @typedoc ~S\"\"\"\n Merge status (stage).\n \"\"\"\n @type stage :: 0..3\n\n @typedoc ~S\"\"\"\n Merge status (stage) for matching a remove request. (Includes `:all` to match any stage.)\n \"\"\"\n @type stage_match :: 0..3 | :all\n\n @typedoc ~S\"\"\"\n A single file (or stage of a file) in a directory cache.\n\n An entry represents exactly one stage of a file. 
If a file path is unmerged\n then multiple instances may appear for the same path name.\n\n Consult the [documentation for git index file format](https://github.com/git/git/blob/master/Documentation/technical/index-format.txt)\n for a more detailed description of each item.\n\n ## Struct Members\n\n * `name`: (`FilePath.t`) entry path name, relative to top-level directory (without leading slash)\n * `stage`: (`0..3`) merge status\n * `object_id`: (`ObjectId.t`) SHA-1 for the represented object\n * `mode`: (`FileMode.t`)\n * `size`: (integer) on-disk size, possibly truncated to 32 bits\n * `ctime`: (integer) the last time the file's metadata changed\n * `ctime_ns`: (integer) nanosecond fraction of `ctime` (if available)\n * `mtime`: (integer) the last time a file's contents changed\n * `mtime_ns`: (integer) nanosecond fractino of `mtime` (if available)\n * `dev`: (integer)\n * `ino`: (integer)\n * `uid`: (integer)\n * `gid`: (integer)\n * `assume_valid?`: (boolean)\n * `extended?`: (boolean)\n * `skip_worktree?`: (boolean)\n * `intent_to_add?`: (boolean)\n \"\"\"\n @type t :: %__MODULE__{\n name: FilePath.t(),\n stage: stage,\n object_id: ObjectId.t(),\n mode: FileMode.t(),\n size: non_neg_integer,\n ctime: integer,\n ctime_ns: non_neg_integer,\n mtime: integer,\n mtime_ns: non_neg_integer,\n dev: integer,\n ino: integer,\n uid: integer,\n gid: integer,\n assume_valid?: boolean,\n extended?: boolean,\n skip_worktree?: boolean,\n intent_to_add?: boolean\n }\n\n @enforce_keys [:name, :stage, :object_id, :size, :mode, :ctime, :mtime]\n\n defstruct [\n :name,\n :stage,\n :object_id,\n :size,\n :mode,\n :ctime,\n :mtime,\n ctime_ns: 0,\n mtime_ns: 0,\n dev: 0,\n ino: 0,\n uid: 0,\n gid: 0,\n assume_valid?: false,\n extended?: false,\n skip_worktree?: false,\n intent_to_add?: false\n ]\n\n @doc ~S\"\"\"\n Return `true` if this entry struct describes a valid dir cache entry.\n \"\"\"\n @spec valid?(entry :: any) :: boolean\n def valid?(entry)\n\n # credo:disable-for-lines:30 Credo.Check.Refactor.CyclomaticComplexity\n def valid?(\n %__MODULE__{\n name: name,\n stage: stage,\n object_id: object_id,\n mode: mode,\n size: size,\n ctime: ctime,\n ctime_ns: ctime_ns,\n mtime: mtime,\n mtime_ns: mtime_ns,\n dev: dev,\n ino: ino,\n uid: uid,\n gid: gid,\n assume_valid?: assume_valid?,\n extended?: extended?,\n skip_worktree?: skip_worktree?,\n intent_to_add?: intent_to_add?\n } = _entry\n )\n when is_list(name) and is_integer(stage) and stage >= 0 and stage <= 3 and\n is_binary(object_id) and\n is_file_mode(mode) and\n is_integer(size) and\n size >= 0 and\n is_integer(ctime) and\n is_integer(ctime_ns) and ctime_ns >= 0 and\n is_integer(mtime) and\n is_integer(mtime_ns) and mtime_ns >= 0 and\n is_integer(dev) and\n is_integer(ino) and\n is_integer(uid) and\n is_integer(gid) and\n is_boolean(assume_valid?) and\n is_boolean(extended?) and\n is_boolean(skip_worktree?) and\n is_boolean(intent_to_add?) 
do\n FilePath.valid?(name) && ObjectId.valid?(object_id) && object_id != ObjectId.zero()\n end\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Compare two entries according to git dir cache entry sort ordering rules.\n\n For this purpose, only the following fields are considered (in this priority order):\n\n * `:name`\n * `:stage`\n\n ## Return Value\n\n * `:lt` if `entry1` sorts before `entry2`.\n * `:eq` if they are the same.\n * `:gt` if `entry1` sorts after `entry2`.\n \"\"\"\n @spec compare(entry1 :: t | nil, entry2 :: t) :: Comparison.result()\n def compare(entry1, entry2)\n\n def compare(nil, _entry2), do: cover(:lt)\n\n def compare(\n %{name: name1, stage: stage1} = _entry1,\n %{name: name2, stage: stage2} = _entry2\n ) do\n cond do\n name1 < name2 -> cover :lt\n name2 < name1 -> cover :gt\n stage1 < stage2 -> cover :lt\n stage2 < stage1 -> cover :gt\n true -> cover :eq\n end\n end\n end\n\n @doc ~S\"\"\"\n Returns a dir cache that is the canonical \"empty\" dir cache (i.e. contains no entries).\n \"\"\"\n @spec empty() :: t\n def empty, do: %__MODULE__{version: 2, entry_count: 0, entries: []}\n\n @doc ~S\"\"\"\n Return `true` if the value is a `DirCache` struct that is valid.\n\n All of the following must be true for this to occur:\n * The value is a `DirCache` struct.\n * The version is supported by Xgit. (Currently, only version 2 is supported.)\n * The `entry_count` matches the actual number of entries.\n * The entries are properly sorted.\n * All entries are valid, as determined by `Xgit.DirCache.Entry.valid?/1`.\n \"\"\"\n @spec valid?(dir_cache :: any) :: boolean\n def valid?(dir_cache)\n\n def valid?(%__MODULE__{version: version, entry_count: entry_count, entries: entries})\n when version == 2 and is_integer(entry_count) and is_list(entries) do\n Enum.count(entries) == entry_count &&\n Enum.all?(entries, &Entry.valid?/1) &&\n entries_sorted?([nil | entries])\n end\n\n def valid?(_), do: cover(false)\n\n defp entries_sorted?([entry1, entry2 | tail]) do\n Entry.compare(entry1, entry2) == :lt &&\n (entry1 == nil ||\n not FilePath.starts_with?(entry2.name, FilePath.ensure_trailing_separator(entry1.name))) &&\n entries_sorted?([entry2 | tail])\n end\n\n defp entries_sorted?([_]), do: cover(true)\n\n @doc ~S\"\"\"\n Return `true` if all of the entries in this dir cache are fully merged (stage 0).\n \"\"\"\n @spec fully_merged?(dir_cache :: t) :: boolean\n def fully_merged?(%__MODULE__{entries: entries} = _dir_cache) do\n Enum.all?(entries, fn %__MODULE__.Entry{stage: stage} -> stage == 0 end)\n end\n\n @typedoc ~S\"\"\"\n Error reason codes returned by `add_entries/2`.\n \"\"\"\n @type add_entries_reason :: :invalid_dir_cache | :invalid_entries | :duplicate_entries\n\n @doc ~S\"\"\"\n Returns a dir cache that has new directory entries added in.\n\n In the event of a collision between entries (same path and stage), the existing\n entry will be replaced by the new one.\n\n ## Parameters\n\n `entries` a list of entries to add to the existing dir cache\n\n ## Return Value\n\n `{:ok, dir_cache}` where `dir_cache` is the original `dir_cache` with the new\n entries added (and properly sorted).\n\n `{:error, :invalid_dir_cache}` if the original `dir_cache` was invalid.\n\n `{:error, :invalid_entries}` if one or more of the entries is invalid.\n\n `{:error, :duplicate_entries}` if one or more of the entries in the _new_ list\n are duplicates of other entries in the _new_ list. 
(As stated earlier, duplicates\n from the original list are acceptable; in that event, the new entry will replace\n the old one.)\n \"\"\"\n @spec add_entries(dir_cache :: t, new_entries :: [Entry.t()]) ::\n {:ok, t} | {:error, add_entries_reason}\n def add_entries(%__MODULE__{entries: existing_entries} = dir_cache, new_entries)\n when is_list(new_entries) do\n with {:dir_cache_valid?, true} <- {:dir_cache_valid?, valid?(dir_cache)},\n {:entries_valid?, true} <- {:entries_valid?, Enum.all?(new_entries, &Entry.valid?/1)},\n sorted_new_entries <- Enum.sort_by(new_entries, &{&1.name, &1.stage}),\n {:duplicates, ^sorted_new_entries} <-\n {:duplicates, Enum.dedup_by(sorted_new_entries, &{&1.name, &1.stage})} do\n combined_entries = combine_entries(existing_entries, sorted_new_entries)\n\n cover {:ok,\n %{dir_cache | entry_count: Enum.count(combined_entries), entries: combined_entries}}\n else\n {:dir_cache_valid?, _} -> cover {:error, :invalid_dir_cache}\n {:entries_valid?, _} -> cover {:error, :invalid_entries}\n {:duplicates, _} -> cover {:error, :duplicate_entries}\n end\n end\n\n defp combine_entries(existing_entries, sorted_new_entries)\n\n defp combine_entries(existing_entries, []), do: cover(existing_entries)\n defp combine_entries([], sorted_new_entries), do: cover(sorted_new_entries)\n\n defp combine_entries(\n [existing_head | existing_tail] = existing_entries,\n [new_head | new_tail] = sorted_new_entries\n ) do\n case Entry.compare(existing_head, new_head) do\n :lt -> cover [existing_head | combine_entries(existing_tail, sorted_new_entries)]\n :eq -> cover [new_head | combine_entries(existing_tail, new_tail)]\n :gt -> cover [new_head | combine_entries(existing_entries, new_tail)]\n end\n end\n\n @typedoc ~S\"\"\"\n An entry for the `remove` option for `remove_entries/2`.\n \"\"\"\n @type entry_to_remove :: {path :: FilePath.t(), stage :: Entry.stage_match()}\n\n @typedoc ~S\"\"\"\n Error reason codes returned by `remove_entries/2`.\n \"\"\"\n @type remove_entries_reason :: :invalid_dir_cache | :invalid_entries\n\n @doc ~S\"\"\"\n Returns a dir cache that has some directory entries removed.\n\n ## Parameters\n\n `entries_to_remove` is a list of `{path, stage}` tuples identifying tuples to be removed.\n\n * `path` should be a byte list for the path.\n * `stage` should be `0..3` or `:all`, meaning any entry that matches the path,\n regardless of stage, should be removed.\n\n ## Return Value\n\n `{:ok, dir_cache}` where `dir_cache` is the original `dir_cache` with any matching\n entries removed.\n\n `{:error, :invalid_dir_cache}` if the original `dir_cache` was invalid.\n\n `{:error, :invalid_entries}` if one or more of the entries is invalid.\n \"\"\"\n @spec remove_entries(dir_cache :: t, entries_to_remove :: [entry_to_remove]) ::\n {:ok, t} | {:error, remove_entries_reason}\n def remove_entries(%__MODULE__{entries: existing_entries} = dir_cache, entries_to_remove)\n when is_list(entries_to_remove) do\n with {:dir_cache_valid?, true} <- {:dir_cache_valid?, valid?(dir_cache)},\n {:entries_valid?, true} <-\n {:entries_valid?, Enum.all?(entries_to_remove, &valid_remove_entry?/1)} do\n updated_entries = remove_matching_entries(existing_entries, Enum.sort(entries_to_remove))\n\n cover {:ok,\n %{dir_cache | entry_count: Enum.count(updated_entries), entries: updated_entries}}\n else\n {:dir_cache_valid?, _} -> cover {:error, :invalid_dir_cache}\n {:entries_valid?, _} -> cover {:error, :invalid_entries}\n end\n end\n\n defp valid_remove_entry?({path, :all}) when is_list(path), do: 
cover(true)\n\n defp valid_remove_entry?({path, stage})\n when is_list(path) and is_integer(stage) and stage >= 0 and stage <= 3,\n do: cover(true)\n\n defp valid_remove_entry?(_), do: cover(false)\n\n defp remove_matching_entries(sorted_existing_entries, sorted_entries_to_remove)\n\n defp remove_matching_entries([], _sorted_entries_to_remove), do: cover([])\n defp remove_matching_entries(sorted_existing_entries, []), do: cover(sorted_existing_entries)\n\n defp remove_matching_entries([%__MODULE__.Entry{name: path} | existing_tail], [\n {path, :all} | remove_tail\n ]),\n do:\n remove_matching_entries(Enum.drop_while(existing_tail, &(&1.name == path)), remove_tail)\n\n defp remove_matching_entries([%__MODULE__.Entry{name: path, stage: stage} | existing_tail], [\n {path, stage} | remove_tail\n ]),\n do: remove_matching_entries(existing_tail, remove_tail)\n\n defp remove_matching_entries([existing_head | existing_tail], sorted_entries_to_remove),\n do: cover([existing_head | remove_matching_entries(existing_tail, sorted_entries_to_remove)])\n\n @typedoc ~S\"\"\"\n Error reason codes returned by `to_tree_objects/2`.\n \"\"\"\n @type to_tree_objects_reason :: :invalid_dir_cache | :prefix_not_found\n\n @doc ~S\"\"\"\n Convert this `DirCache` to one or more `tree` objects.\n\n ## Parameters\n\n `prefix`: (`Xgit.FilePath`) if present, return the object ID for the tree\n pointed to by `prefix`. All tree objects will be generated, regardless of `prefix`.\n\n ## Return Value\n\n `{:ok, objects, prefix_tree}` where `objects` is a list of `Xgit.Object`\n structs of type `tree`. All others must be written or must be present in the\n object database for the top-level tree to be valid. `prefix_tree` is the\n tree for the subtree specified by `prefix` or the top-level tree if no prefix\n was specified.\n\n `{:error, :invalid_dir_cache}` if the `DirCache` is not valid.\n\n `{:error, :prefix_not_found}` if no tree matching `prefix` exists.\n \"\"\"\n @spec to_tree_objects(dir_cache :: t, prefix :: Xgit.FilePath.t()) ::\n {:ok, [Xgit.Object.t()], Xgit.Object.t()} | {:error, to_tree_objects_reason}\n def to_tree_objects(dir_cache, prefix \\\\ [])\n\n def to_tree_objects(%__MODULE__{entries: entries} = dir_cache, prefix)\n when is_list(entries) and is_list(prefix) do\n with {:valid?, true} <- {:valid?, valid?(dir_cache)},\n {_entries, tree_for_prefix, _this_tree} <- to_tree_objects_inner(entries, [], %{}, []),\n {:prefix, prefix_tree} when prefix_tree != nil <-\n {:prefix, Map.get(tree_for_prefix, FilePath.ensure_trailing_separator(prefix))} do\n objects =\n tree_for_prefix\n |> Enum.sort()\n |> Enum.map(fn {_prefix, object} -> object end)\n\n cover {:ok, objects, prefix_tree}\n else\n {:valid?, _} -> cover {:error, :invalid_dir_cache}\n {:prefix, _} -> cover {:error, :prefix_not_found}\n end\n end\n\n defp to_tree_objects_inner(entries, prefix, tree_for_prefix, tree_entries_acc)\n\n defp to_tree_objects_inner([], prefix, tree_for_prefix, tree_entries_acc),\n do: make_tree_and_continue([], prefix, tree_for_prefix, tree_entries_acc)\n\n defp to_tree_objects_inner(\n [%__MODULE__.Entry{name: name, object_id: object_id, mode: mode} | tail] = entries,\n prefix,\n tree_for_prefix,\n tree_entries_acc\n ) do\n if FilePath.starts_with?(name, prefix) do\n name_after_prefix = Enum.drop(name, Enum.count(prefix))\n\n {next_entries, new_tree_entry, tree_for_prefix} =\n if Enum.any?(name_after_prefix, &(&1 == ?/)) do\n make_subtree(entries, prefix, tree_for_prefix, tree_entries_acc)\n else\n cover {tail, %Tree.Entry{name: 
name_after_prefix, object_id: object_id, mode: mode},\n tree_for_prefix}\n end\n\n to_tree_objects_inner(next_entries, prefix, tree_for_prefix, [\n new_tree_entry | tree_entries_acc\n ])\n else\n make_tree_and_continue(entries, prefix, tree_for_prefix, tree_entries_acc)\n end\n end\n\n defp make_tree_and_continue(entries, prefix, tree_for_prefix, tree_entries_acc) do\n tree_object = Tree.to_object(%Tree{entries: Enum.reverse(tree_entries_acc)})\n {entries, Map.put(tree_for_prefix, prefix, tree_object), tree_object}\n end\n\n defp make_subtree(\n [%__MODULE__.Entry{name: name} | _tail] = entries,\n existing_prefix,\n tree_for_prefix,\n _tree_entries_acc\n ) do\n first_segment_after_prefix =\n name\n |> Enum.drop(Enum.count(existing_prefix))\n |> Enum.drop_while(&(&1 == ?/))\n |> Enum.take_while(&(&1 != ?/))\n\n tree_name =\n cover '#{FilePath.ensure_trailing_separator(existing_prefix)}#{first_segment_after_prefix}'\n\n new_prefix = cover '#{tree_name}/'\n\n {entries, tree_for_prefix, tree_object} =\n to_tree_objects_inner(entries, new_prefix, tree_for_prefix, [])\n\n new_tree_entry = %Tree.Entry{\n name: first_segment_after_prefix,\n object_id: tree_object.id,\n mode: FileMode.tree()\n }\n\n cover {entries, new_tree_entry, tree_for_prefix}\n end\n\n @typedoc ~S\"\"\"\n Error codes which can be returned by `from_iodevice/1`.\n \"\"\"\n @type from_iodevice_reason ::\n :not_sha_hash_device\n | :invalid_format\n | :unsupported_version\n | :too_many_entries\n | :unsupported_extension\n | :sha_hash_mismatch\n | File.posix()\n\n @doc ~S\"\"\"\n Read a git `index` file from an `IO.device` (typically an opened file) and returns a\n corresponding `DirCache` struct.\n\n _IMPORTANT:_ The `IO.device` must be created using `Xgit.Util.TrailingHashDevice`.\n\n ## Return Value\n\n `{:ok, dir_cache}` if the iodevice contains a valid index file.\n\n `{:error, :not_sha_hash_device}` if the iodevice was not created using\n `Xgit.Util.TrailingHashDevice`.\n\n `{:error, :invalid_format}` if the iodevice can not be parsed as an index file.\n\n `{:error, :unsupported_version}` if the index file is not a version 2 index file.\n Other versions are not supported at this time.\n\n `{:error, :too_many_entries}` if the index files contains more than 100,000\n entries. This is an arbitrary limit to guard against malformed files and to\n prevent overconsumption of memory. With experience, it could be revisited.\n\n `{:error, :unsupported_extension}` if any index file extensions are present\n that can not be parsed. Optional extensions will be skipped, but no required\n extensions are understood at this time. 
(See\n [issue #172](https://github.com/elixir-git/xgit/issues/172).)\n\n `{:error, :sha_hash_mismatch}` if the SHA-1 hash written at the end of the file\n does not match the file contents.\n \"\"\"\n @spec from_iodevice(iodevice :: IO.device()) ::\n {:ok, dir_cache :: t()} | {:error, reason :: from_iodevice_reason}\n def from_iodevice(iodevice) do\n with {:sha_hash_device, true} <- {:sha_hash_device, TrailingHashDevice.valid?(iodevice)},\n {:dirc, true} <- {:dirc, read_dirc(iodevice)},\n {:version, version = 2} <- {:version, read_uint32(iodevice)},\n {:entry_count, entry_count}\n when is_integer(entry_count) and entry_count <= 100_000 <-\n {:entry_count, read_uint32(iodevice)},\n {:entries, entries} when is_list(entries) <-\n {:entries, read_entries(iodevice, version, entry_count)},\n {:extensions, :ok} <- {:extensions, read_extensions(iodevice)},\n {:sha_valid?, true} <- {:sha_valid?, TrailingHashDevice.valid_hash?(iodevice)} do\n cover {:ok,\n %__MODULE__{\n version: version,\n entry_count: entry_count,\n entries: entries\n }}\n else\n {:sha_hash_device, _} -> cover {:error, :not_sha_hash_device}\n {:dirc, _} -> cover {:error, :invalid_format}\n {:version, _} -> cover {:error, :unsupported_version}\n {:entry_count, :invalid} -> cover {:error, :invalid_format}\n {:entry_count, _} -> cover {:error, :too_many_entries}\n {:entries, _} -> cover {:error, :invalid_format}\n {:extensions, error} -> cover {:error, error}\n {:sha_valid?, _} -> cover {:error, :sha_hash_mismatch}\n end\n end\n\n defp read_dirc(iodevice) do\n case IO.binread(iodevice, 4) do\n \"DIRC\" -> cover true\n _ -> cover false\n end\n end\n\n defp read_entries(_iodevice, _version, 0), do: cover([])\n\n defp read_entries(iodevice, version, entry_count) do\n entries =\n Enum.map(1..entry_count, fn _ ->\n read_entry(iodevice, version)\n end)\n\n if Enum.all?(entries, &valid_entry?/1) do\n cover entries\n else\n cover :invalid\n end\n end\n\n defp read_entry(iodevice, 2 = _version) do\n with ctime when is_integer(ctime) <- read_uint32(iodevice),\n ctime_ns when is_integer(ctime_ns) <- read_uint32(iodevice),\n mtime when is_integer(mtime) <- read_uint32(iodevice),\n mtime_ns when is_integer(mtime_ns) <- read_uint32(iodevice),\n dev when is_integer(dev) <- read_uint32(iodevice),\n ino when is_integer(ino) <- read_uint32(iodevice),\n mode when is_integer(mode) <- read_uint32(iodevice),\n uid when is_integer(uid) <- read_uint32(iodevice),\n gid when is_integer(gid) <- read_uint32(iodevice),\n size when is_integer(size) <- read_uint32(iodevice),\n object_id\n when is_binary(object_id) and object_id != \"0000000000000000000000000000000000000000\" <-\n read_object_id(iodevice),\n flags when is_integer(flags) and flags > 0 <- read_uint16(iodevice),\n name when is_list(name) <- read_name(iodevice, flags &&& 0xFFF) do\n %__MODULE__.Entry{\n name: name,\n stage: bsr(flags &&& 0x3000, 12),\n object_id: object_id,\n size: size,\n mode: mode,\n ctime: ctime,\n ctime_ns: ctime_ns,\n mtime: mtime,\n mtime_ns: mtime_ns,\n dev: dev,\n ino: ino,\n uid: uid,\n gid: gid,\n assume_valid?: to_boolean(flags &&& 0x8000),\n extended?: to_boolean(flags &&& 0x4000),\n skip_worktree?: false,\n intent_to_add?: false\n }\n else\n _ -> cover :invalid\n end\n end\n\n defp valid_entry?(%__MODULE__.Entry{}), do: cover(true)\n defp valid_entry?(_), do: cover(false)\n\n defp read_extensions(iodevice) do\n case IO.binread(iodevice, 1) do\n :eof ->\n :ok\n\n char when byte_size(char) == 1 and char >= \"A\" and char <= \"Z\" ->\n read_optional_extension(iodevice, 
char)\n\n char ->\n read_required_extension(iodevice, char)\n end\n end\n\n defp read_optional_extension(iodevice, char) do\n signature = \"#{char}#{IO.binread(iodevice, 3)}\"\n length = read_uint32(iodevice)\n\n Logger.info(fn ->\n \"skipping extension with signature #{inspect(signature)}, #{length} bytes\"\n end)\n\n IO.binread(iodevice, length)\n read_extensions(iodevice)\n end\n\n defp read_required_extension(iodevice, char) do\n signature = \"#{char}#{IO.binread(iodevice, 3)}\"\n length = read_uint32(iodevice)\n\n Logger.info(fn ->\n \"don't know how to read required extension with signature #{inspect(signature)}, #{length} bytes\"\n end)\n\n :unsupported_extension\n end\n\n defp read_uint16(iodevice) do\n case IO.binread(iodevice, 2) do\n x when is_binary(x) and byte_size(x) == 2 ->\n x\n |> :binary.bin_to_list()\n |> NB.decode_uint16()\n |> elem(0)\n\n _ ->\n cover :invalid\n end\n end\n\n defp read_uint32(iodevice) do\n case IO.binread(iodevice, 4) do\n x when is_binary(x) and byte_size(x) == 4 ->\n x\n |> :binary.bin_to_list()\n |> NB.decode_uint32()\n |> elem(0)\n\n _ ->\n cover :invalid\n end\n end\n\n defp read_object_id(iodevice) do\n case IO.binread(iodevice, 20) do\n x when is_binary(x) and byte_size(x) == 20 -> ObjectId.from_binary_iodata(x)\n _ -> cover :invalid\n end\n end\n\n defp read_name(iodevice, length) when length < 0xFFF do\n bytes_to_read = length + padding_size(Integer.mod(length + 4, 8))\n\n case IO.binread(iodevice, bytes_to_read) do\n x when is_binary(x) and byte_size(x) == bytes_to_read ->\n x\n |> :binary.bin_to_list()\n |> Enum.take_while(&(&1 != 0))\n\n _ ->\n cover :invalid\n end\n end\n\n defp read_name(_iodevice, _length), do: :invalid\n\n defp padding_size(length_mod_8) when length_mod_8 < 6, do: 6 - length_mod_8\n defp padding_size(6), do: cover(8)\n defp padding_size(7), do: cover(7)\n\n defp to_boolean(0), do: cover(false)\n defp to_boolean(_), do: cover(true)\n\n @typedoc ~S\"\"\"\n Error codes which can be returned by `to_iodevice/1`.\n \"\"\"\n @type to_iodevice_reason ::\n :not_sha_hash_device | :invalid_dir_cache | :unsupported_version | File.posix()\n\n @doc ~S\"\"\"\n Write index file to an `iodevice` (typically an opened file) from a\n `DirCache` struct.\n\n _IMPORTANT:_ The `iodevice` must be created using `Xgit.Util.TrailingHashDevice`.\n\n ## Return Value\n\n `:ok` if written successfully.\n\n `{:error, :not_sha_hash_device}` if the iodevice was not created using\n `Xgit.Util.TrailingHashDevice`.\n\n `{:error, :invalid_dir_cache}` if `valid?/1` does not return\n `true` for this struct.\n\n `{:error, :unsupported_version}` if the `version` flag in the dir cache struct\n is not version. Other versions are not supported at this time.\n\n `{:error, posix_reason}` if an I/O error occurs.\n \"\"\"\n @spec to_iodevice(dir_cache :: t(), iodevice :: IO.device()) ::\n :ok | {:error, reason :: to_iodevice_reason}\n def to_iodevice(\n %__MODULE__{version: version, entry_count: entry_count, entries: entries} = dir_cache,\n iodevice\n ) do\n with {:version, 2} <- {:version, version},\n {:valid?, true} <- {:valid?, valid?(dir_cache)},\n {:sha_hash_device, true} <- {:sha_hash_device, TrailingHashDevice.valid?(iodevice)},\n :ok <- write_v2_header(iodevice, entry_count),\n :ok <- write_v2_entries(iodevice, entries) do\n # TO DO: Write extensions. 
https://github.com/elixir-git/xgit/issues/114\n cover :ok\n else\n {:version, _} -> cover {:error, :unsupported_version}\n {:valid?, _} -> cover {:error, :invalid_dir_cache}\n {:sha_hash_device, _} -> cover {:error, :not_sha_hash_device}\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp write_v2_header(iodevice, entry_count),\n do: IO.binwrite(iodevice, ['DIRC', 0, 0, 0, 2, NB.encode_uint32(entry_count)])\n\n defp write_v2_entries(_iodevice, []), do: cover(:ok)\n\n defp write_v2_entries(iodevice, [entry | tail]) do\n case write_v2_entry(iodevice, entry) do\n :ok -> write_v2_entries(iodevice, tail)\n error -> error\n end\n end\n\n defp write_v2_entry(\n iodevice,\n %__MODULE__.Entry{\n name: name,\n stage: stage,\n object_id: object_id,\n size: size,\n mode: mode,\n ctime: ctime,\n ctime_ns: ctime_ns,\n mtime: mtime,\n mtime_ns: mtime_ns,\n dev: dev,\n ino: ino,\n uid: uid,\n gid: gid,\n assume_valid?: assume_valid?,\n extended?: extended?,\n skip_worktree?: false,\n intent_to_add?: false\n }\n ) do\n name_length = Enum.count(name)\n\n IO.binwrite(iodevice, [\n NB.encode_uint32(ctime),\n NB.encode_uint32(ctime_ns),\n NB.encode_uint32(mtime),\n NB.encode_uint32(mtime_ns),\n NB.encode_uint32(dev),\n NB.encode_uint32(ino),\n NB.encode_uint32(mode),\n NB.encode_uint32(uid),\n NB.encode_uint32(gid),\n NB.encode_uint32(size),\n ObjectId.to_binary_iodata(object_id),\n encode_v2_flags(stage, assume_valid?, extended?, name_length),\n name,\n padding(name_length)\n ])\n end\n\n defp encode_v2_flags(stage, assume_valid?, extended?, name_length) do\n value =\n value_if_boolean(assume_valid?, 0x8000) +\n value_if_boolean(extended?, 0x4000) +\n bsl(stage &&& 3, 12) +\n min(name_length, 0xFFF)\n\n NB.encode_uint16(value)\n end\n\n defp value_if_boolean(true, value), do: value\n defp value_if_boolean(false, _value), do: cover(0)\n\n defp padding(name_length) do\n padding_size = padding_size(Integer.mod(name_length + 4, 8))\n Enum.map(1..padding_size, fn _ -> 0 end)\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,620,570,570,null,50,null,null,null,570,26,3951,null,562,17718,null,562,8,null],"name":"lib/xgit/util/parse_header.ex","source":"defmodule Xgit.Util.ParseHeader do\n @moduledoc false\n\n # Internal utility for parsing headers from commit and tag objects.\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Returns the next header that can be parsed from the charlist `b`.\n\n As of this writing, will not parse headers that span multiple lines.\n This may be added later if needed.\n\n ## Return Values\n\n `{'header_name', 'header_value', next_data}` if a header is successfully\n identified. 
`next_data` will be advanced immediately past the LF that\n terminates this header.\n\n `:no_header_found` if unable to find a header at this location.\n \"\"\"\n @spec next_header(b :: charlist) ::\n {header :: charlist, value :: charlist, next_data :: charlist} | :no_header_found\n def next_header(b) when is_list(b) do\n with {[_ | _] = header, [?\\s | next]} <- Enum.split_while(b, &header_char?/1),\n {value, next} <- Enum.split_while(next, &value_char?/1) do\n cover {header, value, skip_next_lf(next)}\n else\n _ -> cover :no_header_found\n end\n end\n\n defp header_char?(32), do: cover(false)\n defp header_char?(10), do: cover(false)\n defp header_char?(_), do: cover(true)\n\n defp value_char?(10), do: cover(false)\n defp value_char?(_), do: cover(true)\n\n defp skip_next_lf([10 | next]), do: cover(next)\n defp skip_next_lf(next), do: cover(next)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,226,null,49,null,177,null,null,164,null,176,null,1,1,null,null,null],"name":"lib/xgit/util/file_utils.ex","source":"defmodule Xgit.Util.FileUtils do\n @moduledoc false\n\n # Internal utility for recursively listing the contents of a directory.\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Recursively list the files of a directory.\n\n Directories are scanned, but their paths are not reported as part of the result.\n \"\"\"\n @spec recursive_files!(path :: Path.t()) :: [Path.t()]\n def recursive_files!(path \\\\ \".\") do\n cond do\n File.regular?(path) ->\n cover [path]\n\n File.dir?(path) ->\n path\n |> File.ls!()\n |> Enum.map(&Path.join(path, &1))\n |> Enum.map(&recursive_files!/1)\n |> Enum.concat()\n\n true ->\n cover []\n end\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,105,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,88,93,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,18,null,null,null,null,null,null,null,198,198,null,null,null,null,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,96,null,null,null,105,105,103,null,null,null,null,null,null,null,2,null,null,null,null,93,null,89,null,null,null,null,null,null,null,null,4,null,null,null,null,18,null,18,null,null,null,null,null,null,null,null,null,null,2826,2826,null,null,null,158,158,null,null,null,1,1,null,null,null,null,195,null,null,1,null,null,1,null,null,null,null,null,null,null,null,93,93,null,93,null,null,1,null,null,1,1,null,null,null,2826,2826,2826,null,null,null,null,null,null,null,99,null,null,1,null,null,null,null,null,null,null,2485,null,2485,2485,2485,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,6,4,4,null,4,null,2,null,null,null,null,null,null,null,null,null,null,null,null,234,234,null,234,null,null,null,1,1,null,null,null,158,158,158,null,null,null,null,null,null,85,85,null,85,null,null,null,72,null,null,1,1,null,null],"name":"lib/xgit/util/trailing_hash_device.ex","source":"defmodule Xgit.Util.TrailingHashDevice do\n @moduledoc false\n\n # Creates an `iodevice` process that supports git file formats with a trailing\n # SHA-1 hash.\n\n # When reading, the trailing 20 bytes are interpreted as a SHA-1 hash of the\n # remaining file contents and can be verified using the `valid_hash?/1` function.\n\n # This is an 
admittedly minimal implementation; just enough is implemented to\n # allow Xgit's index file parser to do its work.\n\n use GenServer\n\n import Xgit.Util.ForceCoverage\n\n require Logger\n\n @doc ~S\"\"\"\n Creates an IO device that reads a file with trailing hash.\n\n Unlike `File.open/2` and `File.open/3`, no options or function are\n accepted.\n\n This device can be passed to `IO.binread/2`.\n\n ## Return Value\n\n `{:ok, pid}` where `pid` points to an IO device process.\n\n `{:ok, reason}` if the file could not be opened. See `File.open/2` for\n possible values for `reason`.\n \"\"\"\n @spec open_file(path :: Path.t()) :: {:ok, pid} | {:error, File.posix()}\n def open_file(path) when is_binary(path),\n do: GenServer.start_link(__MODULE__, {:file, path})\n\n @doc ~S\"\"\"\n Creates an IO device that writes to a file with trailing hash.\n\n Unlike `File.open/2` and `File.open/3`, no options or function are\n accepted.\n\n This device can be passed to `IO.binwrite/2`.\n\n ## Options\n\n `:max_file_size` (non-negative integer) may be passed, which will cause a\n failure after the _n_th byte is written. This is intended for internal\n testing purposes.\n\n ## Return Value\n\n `{:ok, pid}` where `pid` points to an IO device process.\n\n `{:ok, reason}` if the file could not be opened. See `File.open/2` for\n possible values for `reason`.\n \"\"\"\n @spec open_file_for_write(path :: Path.t(), opts :: Keyword.t()) ::\n {:ok, pid} | {:error, File.posix()}\n def open_file_for_write(path, opts \\\\ []) when is_binary(path) and is_list(opts),\n do: GenServer.start_link(__MODULE__, {:file_write, path, opts})\n\n @doc ~S\"\"\"\n Creates an IO device that reads a string with trailing hash.\n\n This is intended mostly for internal testing purposes.\n\n Unlike `StringIO.open/2` and `StringIO.open/3`, no options or function are\n accepted.\n\n This device can be passed to `IO.binread/2`.\n\n ## Return Value\n\n `{:ok, pid}` where `pid` points to an IO device process.\n \"\"\"\n @spec open_string(s :: binary) :: {:ok, pid}\n def open_string(s) when is_binary(s) and byte_size(s) >= 20,\n do: GenServer.start_link(__MODULE__, {:string, s})\n\n @doc ~S\"\"\"\n Returns `true` if this is process is an `TrailingHashDevice` instance.\n\n Note the difference between this function and `valid_hash?/1`.\n \"\"\"\n @spec valid?(v :: any) :: boolean\n def valid?(v) when is_pid(v) do\n GenServer.call(v, :valid_trailing_hash_read_device?) 
== :valid_trailing_hash_read_device\n catch\n :exit, {:timeout, _} -> false\n end\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Returns `true` if the hash at the end of the file matches the hash\n generated while reading the file.\n\n Should only be called once and only once when the entire file (sans SHA-1 hash)\n has been read.\n\n ## Return Values\n\n `true` or `false` if the SHA-1 hash was found and was valid (or not).\n\n `:too_soon` if called before the SHA-1 hash is expected.\n\n `:already_called` if called a second (or successive) time.\n\n `:opened_for_write` if called on a device that was opened for write.\n \"\"\"\n @spec valid_hash?(io_device :: pid) :: boolean\n def valid_hash?(io_device) when is_pid(io_device),\n do: GenServer.call(io_device, :valid_hash?)\n\n @impl true\n def init({:file, path}) do\n with {:ok, %{size: size}} <- File.stat(path, time: :posix),\n {:ok, pid} when is_pid(pid) <- File.open(path) do\n cover {:ok,\n %{\n iodevice: pid,\n mode: :read,\n remaining_bytes: size - 20,\n crypto: :crypto.hash_init(:sha)\n }}\n else\n {:error, reason} -> cover {:stop, reason}\n end\n end\n\n def init({:file_write, path, opts}) do\n case File.open(path, [:write]) do\n {:ok, pid} when is_pid(pid) ->\n cover {:ok,\n %{\n iodevice: pid,\n mode: :write,\n remaining_bytes: Keyword.get(opts, :max_file_size, :unlimited),\n crypto: :crypto.hash_init(:sha)\n }}\n\n {:error, reason} ->\n cover {:stop, reason}\n end\n end\n\n def init({:string, s}) do\n {:ok, pid} = StringIO.open(s)\n\n cover {:ok,\n %{\n iodevice: pid,\n mode: :read,\n remaining_bytes: byte_size(s) - 20,\n crypto: :crypto.hash_init(:sha)\n }}\n end\n\n @impl true\n def handle_info({:io_request, from, reply_as, req}, state) do\n state = io_request(from, reply_as, req, state)\n cover {:noreply, state}\n end\n\n def handle_info({:file_request, from, reply_as, req}, state) do\n state = file_request(from, reply_as, req, state)\n cover {:noreply, state}\n end\n\n def handle_info(message, state) do\n Logger.warn(\"TrailingHashDevice received unexpected message #{inspect(message)}\")\n cover {:noreply, state}\n end\n\n @impl true\n def handle_call(:valid_trailing_hash_read_device?, _from_, state),\n do: {:reply, :valid_trailing_hash_read_device, state}\n\n def handle_call(:valid_hash?, _from, %{mode: :write} = state),\n do: {:reply, :opened_for_write, state}\n\n def handle_call(:valid_hash?, _from, %{crypto: :done} = state),\n do: {:reply, :already_called, state}\n\n def handle_call(\n :valid_hash?,\n _from,\n %{iodevice: iodevice, mode: :read, remaining_bytes: remaining_bytes, crypto: crypto} =\n state\n )\n when remaining_bytes <= 0 do\n actual_hash = :crypto.hash_final(crypto)\n hash_from_file = IO.binread(iodevice, 20)\n\n {:reply, actual_hash == hash_from_file, %{state | crypto: :done}}\n end\n\n def handle_call(:valid_hash?, _from, state), do: {:reply, :too_soon, state}\n\n def handle_call(request, _from, state) do\n Logger.warn(\"TrailingHashDevice received unexpected call #{inspect(request)}\")\n {:reply, :unknown_message, state}\n end\n\n defp io_request(from, reply_as, req, state) do\n {reply, state} = io_request(req, state)\n send(from, {:io_reply, reply_as, reply})\n state\n end\n\n defp io_request(\n {:get_chars, :\"\", count},\n %{mode: :read, remaining_bytes: remaining_bytes} = state\n )\n when remaining_bytes <= 0 and is_integer(count) and count >= 0 do\n cover {:eof, state}\n end\n\n defp io_request({:get_chars, :\"\", 0}, %{mode: :read} = state), do: cover({\"\", state})\n\n defp io_request(\n 
{:get_chars, :\"\", count},\n %{iodevice: iodevice, mode: :read, remaining_bytes: remaining_bytes, crypto: crypto} =\n state\n )\n when is_integer(count) and count > 0 do\n data = IO.binread(iodevice, min(remaining_bytes, count))\n\n if is_binary(data) do\n crypto = :crypto.hash_update(crypto, data)\n cover {data, %{state | remaining_bytes: remaining_bytes - byte_size(data), crypto: crypto}}\n else\n # coveralls-ignore-start\n # This will only occur if an I/O error occurs *mid*-file.\n # Difficult to simulate and fairly simple code, so not generating coverage for this line.\n cover {data, state}\n # coveralls-ignore-stop\n end\n end\n\n defp io_request(\n {:put_chars, _encoding, data},\n %{\n iodevice: iodevice,\n mode: :write,\n remaining_bytes: remaining_bytes,\n crypto: crypto\n } = state\n )\n when is_integer(remaining_bytes) do\n if byte_size(data) <= remaining_bytes do\n crypto = :crypto.hash_update(crypto, data)\n IO.binwrite(iodevice, data)\n\n cover {:ok, %{state | remaining_bytes: remaining_bytes - byte_size(data), crypto: crypto}}\n else\n cover {{:error, :eio}, %{state | remaining_bytes: 0}}\n end\n end\n\n defp io_request(\n {:put_chars, _encoding, data},\n %{\n iodevice: iodevice,\n mode: :write,\n remaining_bytes: :unlimited,\n crypto: crypto\n } = state\n ) do\n crypto = :crypto.hash_update(crypto, data)\n IO.binwrite(iodevice, data)\n\n cover {:ok, %{state | crypto: crypto}}\n end\n\n defp io_request(request, state) do\n Logger.warn(\"TrailingHashDevice received unexpected iorequest #{inspect(request)}\")\n cover {{:error, :request}, state}\n end\n\n defp file_request(from, reply_as, req, state) do\n {reply, state} = file_request(req, state)\n send(from, {:file_reply, reply_as, reply})\n state\n end\n\n defp file_request(\n :close,\n %{iodevice: iodevice, mode: :write, crypto: crypto} = state\n ) do\n hash = :crypto.hash_final(crypto)\n IO.binwrite(iodevice, hash)\n\n cover {File.close(iodevice), %{state | iodevice: nil}}\n end\n\n defp file_request(:close, %{iodevice: iodevice} = state),\n do: cover({File.close(iodevice), %{state | iodevice: nil}})\n\n defp file_request(request, state) do\n Logger.warn(\"TrailingHashDevice received unexpected file_request #{inspect(request)}\")\n cover {{:error, :request}, state}\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,508,508,508,null,508,null,null,null,508,null,22,116,370,null,1529,508,null],"name":"lib/xgit/util/parse_decimal.ex","source":"defmodule Xgit.Util.ParseDecimal do\n @moduledoc false\n\n # Internal utility for parsing decimal values from charlist.\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Parse a base-10 numeric value from a charlist of ASCII digits into a number.\n\n Similar to `Integer.parse/2` but uses charlist instead.\n\n Digit sequences can begin with an optional run of spaces before the\n sequence, and may start with a `+` or a `-` to indicate sign position.\n Any other characters will cause the method to stop and return the current\n result to the caller.\n\n Returns `{number, new_buffer}` where `number` is the integer that was\n found (or 0 if no number found there) and `new_buffer` is the charlist\n following the number that was parsed.\n \"\"\"\n @spec from_decimal_charlist(b :: charlist) :: {integer, charlist}\n def from_decimal_charlist(b) when is_list(b) do\n b = skip_white_space(b)\n {sign, b} = parse_sign(b)\n {n, b} = parse_digits(0, b)\n\n cover {sign * n, b}\n end\n\n defp skip_white_space([?\\s 
| b]), do: skip_white_space(b)\n defp skip_white_space(b), do: b\n\n defp parse_sign([?- | b]), do: cover({-1, b})\n defp parse_sign([?+ | b]), do: cover({1, b})\n defp parse_sign(b), do: cover({1, b})\n\n defp parse_digits(n, [d | b]) when d >= ?0 and d <= ?9, do: parse_digits(n * 10 + (d - ?0), b)\n defp parse_digits(n, b), do: cover({n, b})\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,492,480,492,468,null,null,2,null,null,null,null,null,null,null,null,497,null,null,8,null,null,null,null,null,null,359,4,null,355,null,null,null,126,6,null,null,null,null,null,null,480,null,null,6,null,null,null,null,null,null,null,null,271,2,null,269,null,null,null,100,97,10,null],"name":"lib/xgit/config_entry.ex","source":"defmodule Xgit.ConfigEntry do\n @moduledoc ~S\"\"\"\n Represents one entry in a git configuration dictionary.\n\n This is also commonly referred to as a \"config _line_\" because it typically\n occupies one line in a typical git configuration file.\n\n The semantically-important portion of a configuration file (i.e. everything\n except comments and whitespace) could be represented by a list of `ConfigEntry`\n structs.\n \"\"\"\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n Represents an entry in a git config file.\n\n ## Struct Members\n\n * `section`: (`String`) section name for the entry\n * `subsection`: (`String` or `nil`) subsection name\n * `name`: (`String`) key name\n * `value`: (`String`, `nil`, or `:remove_all`) value\n * `nil` if the name is present without an `=`\n * `:remove_all` can be used as an instruction in some APIs to remove any corresponding entries\n \"\"\"\n @type t :: %__MODULE__{\n section: String.t(),\n subsection: String.t() | nil,\n name: String.t(),\n value: String.t() | :remove_all | nil\n }\n\n @enforce_keys [:section, :subsection, :name, :value]\n defstruct [:section, :name, subsection: nil, value: nil]\n\n @doc ~S\"\"\"\n Returns `true` if passed a valid config entry.\n \"\"\"\n @spec valid?(value :: any) :: boolean\n def valid?(%__MODULE__{} = entry) do\n valid_section?(entry.section) &&\n valid_subsection?(entry.subsection) &&\n valid_name?(entry.name) &&\n valid_value?(entry.value)\n end\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Returns `true` if passed a valid config section name.\n\n Only alphanumeric characters, `-`, and `.` are allowed in section names.\n \"\"\"\n @spec valid_section?(section :: any) :: boolean\n def valid_section?(section) when is_binary(section) do\n String.match?(section, ~r/^[-A-Za-z0-9.]+$/)\n end\n\n def valid_section?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Returns `true` if passed a valid config subsection name.\n \"\"\"\n @spec valid_subsection?(subsection :: any) :: boolean\n def valid_subsection?(subsection) when is_binary(subsection) do\n if String.match?(subsection, ~r/[\\0\\n]/) do\n cover false\n else\n cover true\n end\n end\n\n def valid_subsection?(nil), do: cover(true)\n def valid_subsection?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Returns `true` if passed a valid config entry name.\n \"\"\"\n @spec valid_name?(name :: any) :: boolean\n def valid_name?(name) when is_binary(name) do\n String.match?(name, ~r/^[A-Za-z][-A-Za-z0-9]*$/)\n end\n\n def valid_name?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Returns `true` if passed a valid config value string.\n\n Important: At this level, we do not accept other data 
types.\n \"\"\"\n @spec valid_value?(value :: any) :: boolean\n def valid_value?(value) when is_binary(value) do\n if String.match?(value, ~r/\\0/) do\n cover false\n else\n cover true\n end\n end\n\n def valid_value?(nil), do: cover(true)\n def valid_value?(:remove_all), do: cover(true)\n def valid_value?(_), do: cover(false)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,2,null,null,null,null,null,34,null,null,null,null,null,null,15,null,null,null,49,49,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,45,null,null,null,30,null,null,null,3,null,null,null,12,null,null,null,null,null,null,null,null,null,null,null,null,null,null,11,null,null,null,15,null,null,4,1,null,null,10,2,1,1,1,1,1,1,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,16,null,null,null,null,22,null,null,4,1,17,null,2,2,2,1,1,null,2,2,2,1,1,null,6,null],"name":"lib/xgit/config.ex","source":"defmodule Xgit.Config do\n @moduledoc ~S\"\"\"\n Provides convenience functions to get specific configuration values from a repository.\n\n IMPORTANT: The on-disk repository implementation (`Xgit.Repository.OnDisk`) does not\n examine configuration directives in global or home directory.\n\n This module should be considered roughly analogous to the\n [`git config`](https://git-scm.com/docs/git-config) command.\n \"\"\"\n\n alias Xgit.Repository\n alias Xgit.Repository.Storage\n\n import Xgit.Util.ForceCoverage\n\n @doc ~S\"\"\"\n Get the list of strings for this variable.\n \"\"\"\n @spec get_string_list(\n repository :: Repository.t(),\n section :: String.t(),\n subsection :: String.t() | nil,\n name :: String.t()\n ) :: [String.t() | nil]\n def get_string_list(repository, section, subsection \\\\ nil, name)\n\n def get_string_list(repository, section, nil, name)\n when is_binary(section) and is_binary(name) do\n repository\n |> Storage.get_config_entries(section: section, name: name)\n |> entries_to_values()\n end\n\n def get_string_list(repository, section, subsection, name)\n when is_binary(section) and is_binary(subsection) and is_binary(name) do\n repository\n |> Storage.get_config_entries(section: section, subsection: subsection, name: name)\n |> entries_to_values()\n end\n\n defp entries_to_values(config_entries) do\n Enum.map(config_entries, fn %{value: value} ->\n cover value\n end)\n end\n\n @doc ~S\"\"\"\n If there is a single string for this variable, return it.\n\n If there are zero or multiple values for this variable, return `nil`.\n\n If there is exactly one value, but it was implied (missing `=`), return `:empty`.\n \"\"\"\n @spec get_string(\n repository :: Repository.t(),\n section :: String.t(),\n subsection :: String.t() | nil,\n name :: String.t()\n ) :: String.t() | nil | :empty\n def get_string(repository, section, subsection \\\\ nil, name) do\n repository\n |> get_string_list(section, subsection, name)\n |> single_string_value()\n end\n\n defp single_string_value([value]) when is_binary(value) do\n cover value\n end\n\n defp single_string_value([nil]) do\n cover :empty\n end\n\n defp single_string_value(_) do\n cover nil\n end\n\n @doc ~S\"\"\"\n Return the config value interpreted as an integer.\n\n Use `default` if it can not be interpreted as such.\n \"\"\"\n @spec get_integer(\n repository :: Repository.t(),\n section :: String.t(),\n subsection :: String.t() | nil,\n name :: String.t(),\n default :: integer()\n ) :: integer()\n def 
get_integer(repository, section, subsection \\\\ nil, name, default)\n when is_integer(default) do\n repository\n |> get_string(section, subsection, name)\n |> to_integer_or_default(default)\n end\n\n defp to_integer_or_default(nil, default), do: cover(default)\n defp to_integer_or_default(:empty, default), do: cover(default)\n\n defp to_integer_or_default(value, default) do\n case Integer.parse(value) do\n {n, \"\"} -> cover n\n {n, \"k\"} -> cover n * 1024\n {n, \"K\"} -> cover n * 1024\n {n, \"m\"} -> cover n * 1024 * 1024\n {n, \"M\"} -> cover n * 1024 * 1024\n {n, \"g\"} -> cover n * 1024 * 1024 * 1024\n {n, \"G\"} -> cover n * 1024 * 1024 * 1024\n _ -> cover default\n end\n end\n\n @doc ~S\"\"\"\n Return the config value interpreted as a boolean.\n\n Use `default` if it can not be interpreted as such.\n \"\"\"\n @spec get_boolean(\n repository :: Repository.t(),\n section :: String.t(),\n subsection :: String.t() | nil,\n name :: String.t(),\n default :: boolean()\n ) :: boolean()\n def get_boolean(repository, section, subsection \\\\ nil, name, default)\n when is_boolean(default) do\n repository\n |> get_string(section, subsection, name)\n |> to_lower_if_string()\n |> to_boolean_or_default(default)\n end\n\n defp to_lower_if_string(nil), do: cover(nil)\n defp to_lower_if_string(:empty), do: cover(:empty)\n defp to_lower_if_string(s) when is_binary(s), do: String.downcase(s)\n\n defp to_boolean_or_default(\"yes\", _default), do: cover(true)\n defp to_boolean_or_default(\"on\", _default), do: cover(true)\n defp to_boolean_or_default(\"true\", _default), do: cover(true)\n defp to_boolean_or_default(\"1\", _default), do: cover(true)\n defp to_boolean_or_default(:empty, _default), do: cover(true)\n\n defp to_boolean_or_default(\"no\", _default), do: cover(false)\n defp to_boolean_or_default(\"off\", _default), do: cover(false)\n defp to_boolean_or_default(\"false\", _default), do: cover(false)\n defp to_boolean_or_default(\"0\", _default), do: cover(false)\n defp to_boolean_or_default(\"\", _default), do: cover(false)\n\n defp to_boolean_or_default(_, default), do: cover(default)\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,69,64,null,null,5,null,null,null,null,null,null,null,null,null,null,null,null,null,48,null,null,20,14,6,3,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,56,null,null,2,null,null,62,null,51,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,54,null,null,3,null,null,null,null,50,null,50,49,null,1,null,null,null,null,59,56,56,56,56,56,56,55,null,null,null,null,null,55,null,4,null,null,null,1,null,null,null,1,null,null,335,null,1,null,56,null,null,null,null,null,null,null,null,69,null,null,null,null,null,69,null,null,null,null,null,null,null,null,102,null],"name":"lib/xgit/tree.ex","source":"defmodule Xgit.Tree do\n @moduledoc ~S\"\"\"\n Represents a git `tree` object in memory.\n \"\"\"\n alias Xgit.ContentSource\n alias Xgit.FileMode\n alias Xgit.FilePath\n alias Xgit.Object\n alias Xgit.ObjectId\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n This struct describes a single `tree` object so it can be manipulated in 
memory.\n\n ## Struct Members\n\n * `:entries`: list of `Tree.Entry` structs, which must be sorted by name\n \"\"\"\n @type t :: %__MODULE__{entries: [__MODULE__.Entry.t()]}\n\n @enforce_keys [:entries]\n defstruct [:entries]\n\n defmodule Entry do\n @moduledoc ~S\"\"\"\n A single file in a `tree` structure.\n \"\"\"\n\n use Xgit.FileMode\n\n alias Xgit.FileMode\n alias Xgit.FilePath\n alias Xgit.ObjectId\n alias Xgit.Util.Comparison\n\n import Xgit.Util.ForceCoverage\n\n @typedoc ~S\"\"\"\n A single file in a tree structure.\n\n ## Struct Members\n\n * `name`: (`FilePath.t`) entry path name, relative to top-level directory (without leading slash)\n * `object_id`: (`ObjectId.t`) SHA-1 for the represented object\n * `mode`: (`FileMode.t`)\n \"\"\"\n @type t :: %__MODULE__{\n name: FilePath.t(),\n object_id: ObjectId.t(),\n mode: FileMode.t()\n }\n\n @enforce_keys [:name, :object_id, :mode]\n defstruct [:name, :object_id, :mode]\n\n @doc ~S\"\"\"\n Return `true` if this entry struct describes a valid tree entry.\n \"\"\"\n @spec valid?(entry :: any) :: boolean\n def valid?(entry)\n\n def valid?(\n %__MODULE__{\n name: name,\n object_id: object_id,\n mode: mode\n } = _entry\n )\n when is_list(name) and is_binary(object_id) and is_file_mode(mode) do\n FilePath.check_path_segment(name) == :ok && ObjectId.valid?(object_id) &&\n object_id != ObjectId.zero()\n end\n\n def valid?(_), do: cover(false)\n\n @doc ~S\"\"\"\n Compare two entries according to git file name sorting rules.\n\n ## Return Value\n\n * `:lt` if `entry1` sorts before `entry2`.\n * `:eq` if they are the same.\n * `:gt` if `entry1` sorts after `entry2`.\n \"\"\"\n @spec compare(entry1 :: t | nil, entry2 :: t) :: Comparison.result()\n def compare(entry1, entry2)\n\n def compare(nil, _entry2), do: cover(:lt)\n\n def compare(%{name: name1} = _entry1, %{name: name2} = _entry2) do\n cond do\n name1 < name2 -> cover :lt\n name2 < name1 -> cover :gt\n true -> cover :eq\n end\n end\n end\n\n @doc ~S\"\"\"\n Return `true` if the value is a tree struct that is valid.\n\n All of the following must be true for this to occur:\n * The value is a `Tree` struct.\n * The entries are properly sorted.\n * All entries are valid, as determined by `Xgit.Tree.Entry.valid?/1`.\n \"\"\"\n @spec valid?(tree :: any) :: boolean\n def valid?(tree)\n\n def valid?(%__MODULE__{entries: entries}) when is_list(entries) do\n Enum.all?(entries, &Entry.valid?/1) && entries_sorted?([nil | entries])\n end\n\n def valid?(_), do: cover(false)\n\n defp entries_sorted?([entry1, entry2 | tail]),\n do: Entry.compare(entry1, entry2) == :lt && entries_sorted?([entry2 | tail])\n\n defp entries_sorted?([_]), do: cover(true)\n\n @typedoc ~S\"\"\"\n Error response codes returned by `from_object/1`.\n \"\"\"\n @type from_object_reason :: :not_a_tree | :invalid_format | :invalid_tree\n\n @doc ~S\"\"\"\n Renders a tree structure from an `Xgit.Object`.\n\n ## Return Values\n\n `{:ok, tree}` if the object contains a valid `tree` object.\n\n `{:error, :not_a_tree}` if the object contains an object of a different type.\n\n `{:error, :invalid_format}` if the object says that is of type `tree`, but\n can not be parsed as such.\n\n `{:error, :invalid_tree}` if the object can be parsed as a tree, but the\n entries are not well formed or not properly sorted.\n \"\"\"\n @spec from_object(object :: Object.t()) :: {:ok, tree :: t} | {:error, from_object_reason}\n def from_object(object)\n\n def from_object(%Object{type: :tree, content: content} = _object) do\n content\n |> 
ContentSource.stream()\n |> Enum.to_list()\n |> from_object_internal([])\n end\n\n def from_object(%Object{} = _object), do: cover({:error, :not_a_tree})\n\n defp from_object_internal(data, entries_acc)\n\n defp from_object_internal([], entries_acc) do\n tree = %__MODULE__{entries: Enum.reverse(entries_acc)}\n\n if valid?(tree) do\n cover {:ok, tree}\n else\n cover {:error, :invalid_tree}\n end\n end\n\n defp from_object_internal(data, entries_acc) do\n with {:ok, file_mode, data} <- parse_file_mode(data, 0),\n true <- FileMode.valid?(file_mode),\n {name, [0 | data]} <- path_and_object_id(data),\n :ok <- FilePath.check_path_segment(name),\n {raw_object_id, data} <- Enum.split(data, 20),\n 20 <- Enum.count(raw_object_id),\n false <- Enum.all?(raw_object_id, &(&1 == 0)) do\n this_entry = %__MODULE__.Entry{\n name: name,\n mode: file_mode,\n object_id: ObjectId.from_binary_iodata(raw_object_id)\n }\n\n from_object_internal(data, [this_entry | entries_acc])\n else\n _ -> cover {:error, :invalid_format}\n end\n end\n\n defp parse_file_mode([], _mode), do: cover({:error, :invalid_mode})\n\n defp parse_file_mode([?\\s | data], mode), do: cover({:ok, mode, data})\n\n defp parse_file_mode([?0 | _data], 0), do: cover({:error, :invalid_mode})\n\n defp parse_file_mode([c | data], mode) when c >= ?0 and c <= ?7,\n do: parse_file_mode(data, mode * 8 + (c - ?0))\n\n defp parse_file_mode([_c | _data], _mode), do: cover({:error, :invalid_mode})\n\n defp path_and_object_id(data), do: Enum.split_while(data, &(&1 != 0))\n\n @doc ~S\"\"\"\n Renders this tree structure into a corresponding `Xgit.Object`.\n \"\"\"\n @spec to_object(tree :: t) :: Object.t()\n def to_object(tree)\n\n def to_object(%__MODULE__{entries: entries} = _tree) do\n rendered_entries =\n entries\n |> Enum.map(&entry_to_iodata/1)\n |> IO.iodata_to_binary()\n |> :binary.bin_to_list()\n\n %Object{\n type: :tree,\n content: rendered_entries,\n size: Enum.count(rendered_entries),\n id: ObjectId.calculate_id(rendered_entries, :tree)\n }\n end\n\n defp entry_to_iodata(%__MODULE__.Entry{name: name, object_id: object_id, mode: mode}),\n do: cover([FileMode.to_short_octal(mode), ?\\s, name, 0, ObjectId.to_binary_iodata(object_id)])\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,2,23,19,25,16,null,null,4,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,69,null,null,2,null,null,69,61,51,null,43,39,null,33,null,21,null,19,null,null,17,null,null,null,null,null,null,null,null,52,null,null,null,null,59,16,8,null,8,43,null,null,null,6,11,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,10,1,null,null,9,null,null,6,null,9,9,null,9,9,null,null,null,null,null,null,9,null,null,null,null,null,null,null],"name":"lib/xgit/commit.ex","source":"defmodule Xgit.Commit do\n @moduledoc ~S\"\"\"\n Represents a git `commit` object in memory.\n \"\"\"\n alias Xgit.ContentSource\n alias Xgit.Object\n alias Xgit.ObjectId\n alias Xgit.PersonIdent\n\n import Xgit.Util.ForceCoverage\n import Xgit.Util.ParseHeader, only: [next_header: 1]\n\n @typedoc ~S\"\"\"\n This struct describes a single `commit` object so it can be manipulated in memory.\n\n ## Struct Members\n\n * `:tree`: (`Xgit.ObjectId`) tree referenced by this commit\n 
* `:parents`: (list of `Xgit.ObjectId`) parent(s) of this commit\n * `:author`: (`Xgit.PersonIdent`) author of this commit\n * `:committer`: (`Xgit.PersonIdent`) committer for this commit\n * `:message`: (bytelist) user-entered commit message (encoding unspecified)\n\n **TO DO:** Support signatures and other extensions.\n https://github.com/elixir-git/xgit/issues/202\n \"\"\"\n @type t :: %__MODULE__{\n tree: ObjectId.t(),\n parents: [ObjectId.t()],\n author: PersonIdent.t(),\n committer: PersonIdent.t(),\n message: [byte]\n }\n\n @enforce_keys [:tree, :author, :committer, :message]\n defstruct [:tree, :author, :committer, :message, parents: []]\n\n @doc ~S\"\"\"\n Return `true` if the value is a commit struct that is valid.\n \"\"\"\n @spec valid?(commit :: any) :: boolean\n def valid?(commit)\n\n def valid?(%__MODULE__{\n tree: tree,\n parents: parents,\n author: %PersonIdent{} = author,\n committer: %PersonIdent{} = committer,\n message: message\n })\n when is_binary(tree) and is_list(parents) and is_list(message) do\n ObjectId.valid?(tree) &&\n Enum.all?(parents, &ObjectId.valid?(&1)) &&\n PersonIdent.valid?(author) &&\n PersonIdent.valid?(committer) &&\n not Enum.empty?(message)\n end\n\n def valid?(_), do: cover(false)\n\n @typedoc ~S\"\"\"\n Error response codes returned by `from_object/1`.\n \"\"\"\n @type from_object_reason :: :not_a_commit | :invalid_commit\n\n @doc ~S\"\"\"\n Renders a commit structure from an `Xgit.Object`.\n\n ## Return Values\n\n `{:ok, commit}` if the object contains a valid `commit` object.\n\n `{:error, :not_a_commit}` if the object contains an object of a different type.\n\n `{:error, :invalid_commit}` if the object says that is of type `commit`, but\n can not be parsed as such.\n \"\"\"\n @spec from_object(object :: Object.t()) :: {:ok, commit :: t} | {:error, from_object_reason}\n def from_object(object)\n\n def from_object(%Object{type: :commit, content: content} = _object) do\n content\n |> ContentSource.stream()\n |> Enum.to_list()\n |> from_object_internal()\n end\n\n def from_object(%Object{} = _object), do: cover({:error, :not_a_commit})\n\n defp from_object_internal(data) do\n with {:tree, {'tree', tree_id_str, data}} <- {:tree, next_header(data)},\n {:tree_id, {tree_id, []}} <- {:tree_id, ObjectId.from_hex_charlist(tree_id_str)},\n {:parents, {parents, data}} when is_list(data) <-\n {:parents, read_parents(data, [])},\n {:author, {'author', author_str, data}} <- {:author, next_header(data)},\n {:author_id, %PersonIdent{} = author} <-\n {:author_id, PersonIdent.from_byte_list(author_str)},\n {:committer, {'committer', committer_str, data}} <-\n {:committer, next_header(data)},\n {:committer_id, %PersonIdent{} = committer} <-\n {:committer_id, PersonIdent.from_byte_list(committer_str)},\n message when is_list(message) <- drop_if_lf(data) do\n # TO DO: Support signatures and other extensions.\n # https://github.com/elixir-git/xgit/issues/202\n cover {:ok,\n %__MODULE__{\n tree: tree_id,\n parents: parents,\n author: author,\n committer: committer,\n message: message\n }}\n else\n _ -> cover {:error, :invalid_commit}\n end\n end\n\n defp read_parents(data, parents_acc) do\n with {'parent', parent_id, next_data} <- next_header(data),\n {:parent_id, {parent_id, []}} <- {:parent_id, ObjectId.from_hex_charlist(parent_id)} do\n read_parents(next_data, [parent_id | parents_acc])\n else\n {:parent_id, _} -> cover :error\n _ -> cover {Enum.reverse(parents_acc), data}\n end\n end\n\n defp drop_if_lf([10 | data]), do: cover(data)\n defp drop_if_lf([]), do: 
cover([])\n defp drop_if_lf(_), do: cover(:error)\n\n @doc ~S\"\"\"\n Renders this commit structure into a corresponding `Xgit.Object`.\n\n If duplicate parents are detected, they will be silently de-duplicated.\n\n If the commit structure is not valid, will raise `ArgumentError`.\n \"\"\"\n @spec to_object(commit :: t) :: Object.t()\n def to_object(commit)\n\n def to_object(\n %__MODULE__{\n tree: tree,\n parents: parents,\n author: %PersonIdent{} = author,\n committer: %PersonIdent{} = committer,\n message: message\n } = commit\n ) do\n unless valid?(commit) do\n raise ArgumentError, \"Xgit.Commit.to_object/1: commit is not valid\"\n end\n\n rendered_parents =\n parents\n |> Enum.uniq()\n |> Enum.flat_map(&'parent #{&1}\\n')\n\n rendered_commit =\n 'tree #{tree}\\n' ++\n rendered_parents ++\n 'author #{PersonIdent.to_external_string(author)}\\n' ++\n 'committer #{PersonIdent.to_external_string(committer)}\\n' ++\n '\\n' ++\n message\n\n # TO DO: Support signatures and other extensions.\n # https://github.com/elixir-git/xgit/issues/202\n\n %Object{\n type: :commit,\n content: rendered_commit,\n size: Enum.count(rendered_commit),\n id: ObjectId.calculate_id(rendered_commit, :commit)\n }\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,528,527,524,524,524,null,4,null,null,null,null,528,527,527,null,null,null,null,null,null,null,null,null,null,null,null,527,526,526,525,524,null,1,1,null,1,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,7,null,null,null,7,1,null,6,null,null,null,1,null,null,null,null,null,null,null,null,6,6,6,null,null,null,null,null,null,6,6,6,6,6,6,6,6,6,null,null,null,null,null,null,null,null,6,null,null,null,null,null,null,null,null,null,null,null,6,null,null,null,null,null,6,null,null,null,null,null,6,null,null,null,null,null,6,6,null,null,6,null,null,null,null,null,null,null,null,null,null,null,null,null,null,6,6,6,null,null,null,null,null,null,6,null,6,6,6,6,null,null,null,null,null,null,null,null,null,14,16,null,14,null,null,null,16,null,null,null,null,null,null,null,16,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,1,null,null,null,null,null,null,986,115,null,null,null,null,null,null,178,161,5,12,null,null,null,null,null,null,null,null,236,null,null,null,236,null,null,null,null,null,null,null,236,230,218,null,6,12,null,null,null,230,null,null,null,null,2112,null,1370,230,null,1,null,null,null,null,null,null,223,null,6,null,2,null,null,220,218,2,null,null,null,1,null,null,null,218,null,null,null,null,null,null,null,null,null,192,192,null,192,null,189,null,null,185,null,185,null,null,3,null,null,4,null,null,null,null,185,185,null,185,185,null,185,180,null,5,5,null,null,185,null,null,null,null,null,null,55,null,5,null,null,425,610,null,null,null,null,null,57,null,null,null,null,57,null,null,44,null,null,null,null,null,null,44,null,43,null,null,null,null,73,72,null,72,72,null,null,65,null,59,null,7,7,null,null,null,15,null,null,58,null,58,1,null,57,null,null,null,null,56,null,5,null,null,51,null,null,null,21,null,67,null,null,12,3,9,null,null,null,null,12,4,null,null,null,null,null,65,65,null,65,null,65,62,null,3,null,null,null,null,null,19,null,null,15,null,9,null,4,2,4,null,null,null,null,15,15,null,null,null,null,47,null,19,null
,null,4,null,null,24,null,null,null,null,43,null,5,null,null,38,null,null,null,null,132,null,132,51,2,71,8,null,null,null,null,97,null,96,96,94,null,2,1,null,null,null,null,null,null,null,14,14,null,null,null,null,null,null,null,null,7,null,null,null,null,null,7,6,1,null,null,null,null,null,3,3,null,null],"name":"lib/xgit/repository/on_disk.ex","source":"defmodule Xgit.Repository.OnDisk do\n @moduledoc ~S\"\"\"\n Implementation of `Xgit.Repository.Storage` that stores content on the\n local file system.\n\n _IMPORTANT NOTE:_ This is intended as a reference implementation largely\n for testing purposes and may not necessarily handle all of the edge cases that\n the traditional `git` command-line interface will handle.\n\n That said, it does intentionally use the same `.git` folder format as command-line\n `git` so that results may be compared for similar operations.\n \"\"\"\n use Xgit.Repository.Storage\n\n import Xgit.Util.ForceCoverage\n\n alias Xgit.ConfigEntry\n alias Xgit.ConfigFile\n alias Xgit.ContentSource\n alias Xgit.Object\n alias Xgit.Ref\n alias Xgit.Repository.WorkingTree\n alias Xgit.Util.FileUtils\n alias Xgit.Util.ParseDecimal\n alias Xgit.Util.UnzipStream\n\n @doc ~S\"\"\"\n Start an on-disk git repository.\n\n Use the functions in `Xgit.Repository.Storage` to interact with this repository process.\n\n An `Xgit.Repository.WorkingTree` will be automatically created and attached\n to this repository.\n\n ## Options\n\n * `:work_dir` (required): Top-level working directory. A `.git` directory should\n exist at this path. Use `create/1` to create an empty on-disk repository if\n necessary.\n\n Any other options are passed through to `GenServer.start_link/3`.\n\n ## Return Value\n\n See `GenServer.start_link/3`.\n\n `{:error, :work_dir_invalid}` if `work_dir` is missing or not a String.\n \"\"\"\n @spec start_link(work_dir: Path.t()) :: GenServer.on_start()\n def start_link(opts) do\n with {:ok, work_dir} <- get_work_dir_opt(opts),\n {:ok, repo} <- Storage.start_link(__MODULE__, work_dir, opts),\n {:ok, working_tree} <- WorkingTree.start_link(repo, work_dir),\n :ok <- Storage.set_default_working_tree({:xgit_repo, repo}, working_tree) do\n cover {:ok, repo}\n else\n err -> err\n end\n end\n\n defp get_work_dir_opt(opts) do\n with {:has_opt?, true} <- {:has_opt?, Keyword.has_key?(opts, :work_dir)},\n work_dir <- Keyword.get(opts, :work_dir),\n true <- is_binary(work_dir) do\n {:ok, work_dir}\n else\n {:has_opt?, _} -> {:error, :missing_arguments}\n _ -> {:error, :work_dir_invalid}\n end\n end\n\n @impl true\n def init(work_dir) when is_binary(work_dir) do\n # TO DO: Be smarter about bare repos and non-standard git_dir locations.\n # https://github.com/elixir-git/xgit/issues/44\n\n with {:work_dir, true} <- {:work_dir, File.dir?(work_dir)},\n git_dir <- Path.join(work_dir, \".git\"),\n {:git_dir, true} <- {:git_dir, File.dir?(git_dir)},\n {:ok, config_file} <- ConfigFile.start_link(Path.join(git_dir, \"config\")) do\n cover {:ok, %{work_dir: work_dir, git_dir: git_dir, config_file: config_file}}\n else\n {:work_dir, _} -> cover {:stop, :work_dir_doesnt_exist}\n {:git_dir, _} -> cover {:stop, :git_dir_doesnt_exist}\n :ignore -> :ignore\n {:error, error} -> cover {:error, error}\n end\n end\n\n @doc ~S\"\"\"\n Creates a new, empty git repository on the local file system.\n\n Analogous to [`git init`](https://git-scm.com/docs/git-init).\n\n _NOTE:_ We use the name `create` here so as to avoid a naming conflict with\n `c:GenServer.init/1`.\n\n ## Parameters\n\n `work_dir` (String) is 
the top-level working directory. A `.git` directory is\n created inside this directory.\n\n ## Return Value\n\n `:ok` if created successfully.\n\n `{:error, :work_dir_must_not_exist}` if `work_dir` already exists.\n \"\"\"\n @spec create(work_dir :: Path.t()) :: :ok | {:error, :work_dir_must_not_exist}\n def create(work_dir) when is_binary(work_dir) do\n work_dir\n |> assert_not_exists()\n |> create_empty_repo()\n end\n\n defp assert_not_exists(path) do\n if File.exists?(path) do\n cover {:error, :work_dir_must_not_exist}\n else\n cover {:ok, path}\n end\n end\n\n defp create_empty_repo({:error, reason}), do: cover({:error, reason})\n\n # Exception to the usual policy about using `cover` macro:\n # Most of these error cases are about I/O errors that are difficult\n # to simulate (can create parent repo dir, but then can't create\n # a child thereof, etc.). This code is un-complicated, so we\n # choose to leave it silently uncovered.\n\n defp create_empty_repo({:ok, path}) do\n with :ok <- File.mkdir_p(path),\n :ok <- create_git_dir(Path.join(path, \".git\")) do\n cover :ok\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n defp create_git_dir(git_dir) do\n with :ok <- File.mkdir_p(git_dir),\n :ok <- create_config(git_dir),\n :ok <- create_description(git_dir),\n :ok <- create_head(git_dir),\n :ok <- create_hooks_dir(git_dir),\n :ok <- create_info_dir(git_dir),\n :ok <- create_objects_dir(git_dir),\n :ok <- create_refs_dir(git_dir) do\n cover :ok\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n defp create_config(git_dir) do\n git_dir\n |> Path.join(\"config\")\n |> File.write(~s\"\"\"\n [core]\n \\trepositoryformatversion = 0\n \\tfilemode = true\n \\tbare = false\n \\tlogallrefupdates = true\n \"\"\")\n end\n\n defp create_description(git_dir) do\n git_dir\n |> Path.join(\"description\")\n |> File.write(\"Unnamed repository; edit this file 'description' to name the repository.\\n\")\n end\n\n defp create_head(git_dir) do\n git_dir\n |> Path.join(\"HEAD\")\n |> File.write(\"ref: refs/heads/master\\n\")\n end\n\n defp create_hooks_dir(git_dir) do\n git_dir\n |> Path.join(\"hooks\")\n |> File.mkdir_p()\n\n # NOTE: Intentionally not including the sample files.\n end\n\n defp create_info_dir(git_dir) do\n with info_dir <- Path.join(git_dir, \"info\"),\n :ok <- File.mkdir_p(info_dir) do\n info_dir\n |> Path.join(\"exclude\")\n |> File.write!(~S\"\"\"\n # git ls-files --others --exclude-from=.git/info/exclude\n # Lines that start with '#' are comments.\n # For a project mostly in C, the following would be a good set of\n # exclude patterns (uncomment them if you want to use them):\n # *.[oa]\n # *~\n .DS_Store\n \"\"\")\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n defp create_objects_dir(git_dir) do\n with :ok <- File.mkdir_p(Path.join(git_dir, \"objects/info\")),\n :ok <- File.mkdir_p(Path.join(git_dir, \"objects/pack\")) do\n cover :ok\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n defp create_refs_dir(git_dir) do\n refs_dir = Path.join(git_dir, \"refs\")\n\n with :ok <- File.mkdir_p(refs_dir),\n :ok <- File.mkdir_p(Path.join(refs_dir, \"heads\")),\n :ok <- File.mkdir_p(Path.join(refs_dir, \"tags\")) do\n cover :ok\n else\n {:error, reason} -> {:error, reason}\n end\n end\n\n ## --- Objects ---\n\n @impl true\n def handle_has_all_object_ids?(%{git_dir: git_dir} = state, object_ids) do\n has_all_object_ids? 
=\n Enum.all?(object_ids, fn object_id -> has_object_id?(git_dir, object_id) end)\n\n cover {:ok, has_all_object_ids?, state}\n end\n\n defp has_object_id?(git_dir, object_id) do\n loose_object_path =\n Path.join([\n git_dir,\n \"objects\",\n String.slice(object_id, 0, 2),\n String.slice(object_id, 2, 38)\n ])\n\n File.regular?(loose_object_path)\n end\n\n defmodule LooseObjectContentSource do\n @moduledoc false\n\n # Implements `Xgit.ContentSource` to read content from a loose object.\n\n import Xgit.Util.ForceCoverage\n\n @type t :: %__MODULE__{path: Path.t(), size: non_neg_integer}\n\n @enforce_keys [:path, :size]\n defstruct [:path, :size]\n\n defimpl Xgit.ContentSource do\n alias Xgit.Repository.OnDisk.LooseObjectContentSource, as: LCS\n @impl true\n def length(%LCS{size: size}), do: cover(size)\n\n @impl true\n def stream(%LCS{path: path}) do\n path\n |> File.stream!([:binary])\n |> UnzipStream.unzip()\n |> Stream.drop_while(&(&1 != 0))\n |> Stream.drop(1)\n end\n end\n end\n\n @impl true\n def handle_get_object(state, object_id) do\n case get_object_imp(state, object_id) do\n %Object{} = object -> {:ok, object, state}\n {:error, :not_found} -> {:error, :not_found, state}\n {:error, :invalid_object} -> {:error, :invalid_object, state}\n end\n end\n\n defp get_object_imp(%{git_dir: git_dir} = _state, object_id) do\n # Currently only checks for loose objects.\n # TO DO: Look for object in packs.\n # https://github.com/elixir-git/xgit/issues/52\n\n find_loose_object(git_dir, object_id)\n end\n\n defp find_loose_object(git_dir, object_id) do\n loose_object_path =\n Path.join([\n git_dir,\n \"objects\",\n String.slice(object_id, 0, 2),\n String.slice(object_id, 2, 38)\n ])\n\n with {:exists?, true} <- {:exists?, File.regular?(loose_object_path)},\n {:header, type, length} <- read_loose_object_prefix(loose_object_path) do\n loose_file_to_object(type, length, object_id, loose_object_path)\n else\n {:exists?, false} -> cover {:error, :not_found}\n :invalid_header -> cover {:error, :invalid_object}\n end\n end\n\n defp read_loose_object_prefix(path) do\n path\n |> File.stream!([:binary], 1000)\n |> UnzipStream.unzip()\n |> Stream.take(100)\n |> Stream.take_while(&(&1 != 0))\n |> Enum.to_list()\n |> Enum.split_while(&(&1 != ?\\s))\n |> parse_prefix_and_length()\n rescue\n ErlangError -> cover :invalid_header\n end\n\n @known_types ['blob', 'tag', 'tree', 'commit']\n @type_to_atom %{'blob' => :blob, 'tag' => :tag, 'tree' => :tree, 'commit' => :commit}\n\n defp parse_prefix_and_length({type, length}) when type in @known_types,\n do: parse_length(@type_to_atom[type], length)\n\n defp parse_prefix_and_length(_), do: cover(:invalid_header)\n\n defp parse_length(_type, ' '), do: cover(:invalid_header)\n\n defp parse_length(type, [?\\s | length]) do\n case ParseDecimal.from_decimal_charlist(length) do\n {length, []} when is_integer(length) and length >= 0 -> {:header, type, length}\n _ -> cover :invalid_header\n end\n end\n\n defp parse_length(_type, _length), do: cover(:invalid_header)\n\n defp loose_file_to_object(type, length, object_id, path)\n when is_atom(type) and is_integer(length) do\n %Object{\n type: type,\n size: length,\n id: object_id,\n content: %__MODULE__.LooseObjectContentSource{size: length, path: path}\n }\n end\n\n @impl true\n def handle_put_loose_object(%{git_dir: git_dir} = state, %Object{id: id} = object) do\n object_dir = Path.join([git_dir, \"objects\", String.slice(id, 0, 2)])\n path = Path.join(object_dir, String.slice(id, 2, 38))\n\n with {:mkdir, :ok} <-\n {:mkdir, 
File.mkdir_p(object_dir)},\n {:file, {:ok, :ok}} <-\n {:file,\n File.open(path, [:write, :binary, :exclusive], fn file_pid ->\n deflate_and_write(file_pid, object)\n end)} do\n cover {:ok, state}\n else\n {:mkdir, _} ->\n {:error, :cant_create_file, state}\n\n {:file, {:error, :eexist}} ->\n {:error, :object_exists, state}\n end\n end\n\n defp deflate_and_write(file, %Object{type: type, size: size, content: content}) do\n z = :zlib.open()\n :ok = :zlib.deflateInit(z, 1)\n\n deflate_and_write_bytes(file, z, '#{type} #{size}')\n deflate_and_write_bytes(file, z, [0])\n\n if is_list(content) do\n deflate_and_write_bytes(file, z, content, :finish)\n else\n deflate_content(file, z, content)\n deflate_and_write_bytes(file, z, [], :finish)\n end\n\n :zlib.deflateEnd(z)\n end\n\n defp deflate_content(file, z, content) do\n content\n |> ContentSource.stream()\n |> Stream.each(fn chunk ->\n deflate_and_write_bytes(file, z, [chunk])\n end)\n |> Stream.run()\n end\n\n defp deflate_and_write_bytes(file, z, bytes, flush \\\\ :none),\n do: IO.binwrite(file, :zlib.deflate(z, bytes, flush))\n\n ## --- References ---\n\n @impl true\n def handle_list_refs(%{git_dir: git_dir} = state) do\n refs_dir = Path.join(git_dir, \"refs\")\n\n # TO DO: Add support for packed refs.\n # https://github.com/elixir-git/xgit/issues/223\n\n {:ok,\n refs_dir\n |> FileUtils.recursive_files!()\n |> Task.async_stream(fn path -> ref_path_to_ref(git_dir, path) end)\n |> Enum.map(&drop_ref_ok_tuple/1)\n |> Enum.filter(& &1)\n |> Enum.sort(), state}\n end\n\n defp ref_path_to_ref(git_dir, path),\n do: File.open!(path, &read_ref_imp(String.replace_prefix(path, \"#{git_dir}/\", \"\"), &1))\n\n defp drop_ref_ok_tuple({:ok, %Ref{} = value}), do: value\n defp drop_ref_ok_tuple(_), do: nil\n\n @impl true\n def handle_put_ref(%{git_dir: git_dir} = state, %Ref{name: name, target: target} = ref, opts) do\n with :ok <- verify_target(state, target),\n {:deref, new_name} <-\n {:deref, deref_sym_link(git_dir, name, Keyword.get(opts, :follow_link?, true))},\n ref <- %{ref | name: new_name},\n {:old_target_matches?, true} <-\n {:old_target_matches?,\n old_target_matches?(git_dir, new_name, Keyword.get(opts, :old_target))},\n :ok <- put_ref_imp(git_dir, ref) do\n # TO DO: Update ref log if so requested. https://github.com/elixir-git/xgit/issues/224\n cover {:ok, state}\n else\n {:error, reason} -> cover {:error, reason, state}\n {:old_target_matches?, _} -> cover {:error, :old_target_not_matched, state}\n end\n end\n\n defp verify_target(_state, \"ref: \" <> _), do: cover(:ok)\n\n defp verify_target(state, target) do\n object = get_object_imp(state, target)\n\n if object == {:error, :not_found} do\n cover {:error, :target_not_found}\n else\n cover :ok\n end\n end\n\n defp deref_sym_link(git_dir, ref_name, true = _follow_link?) 
do\n case get_ref_imp(git_dir, ref_name, false) do\n {:ok, %Ref{target: \"ref: \" <> link_target}} when link_target != ref_name ->\n deref_sym_link(git_dir, link_target, true)\n\n _ ->\n ref_name\n end\n end\n\n defp deref_sym_link(_git_dir, ref_name, _follow_link?), do: cover(ref_name)\n\n defp old_target_matches?(_git_dir, _name, nil), do: cover(true)\n\n defp old_target_matches?(git_dir, name, :new) do\n case get_ref_imp(git_dir, name, false) do\n {:ok, _ref} -> cover false\n _ -> cover true\n end\n end\n\n defp old_target_matches?(git_dir, name, old_target) do\n case get_ref_imp(git_dir, name, false) do\n {:ok, %Ref{target: ^old_target}} -> cover true\n _ -> false\n end\n end\n\n defp put_ref_imp(git_dir, %Ref{name: name, target: target} = _ref) do\n ref_path = Path.join(git_dir, name)\n ref_dir = Path.dirname(ref_path)\n\n mkdir_result = File.mkdir_p(ref_dir)\n\n if mkdir_result == :ok do\n File.write(ref_path, \"#{target}\\n\")\n else\n cover mkdir_result\n end\n end\n\n @impl true\n def handle_delete_ref(%{git_dir: git_dir} = state, name, opts) do\n with {:old_target_matches?, true} <-\n {:old_target_matches?,\n old_target_matches?(git_dir, name, Keyword.get(opts, :old_target))},\n :ok <- delete_ref_imp(git_dir, name) do\n # TO DO: Update ref log if so requested. https://github.com/elixir-git/xgit/issues/224\n cover {:ok, state}\n else\n {:old_target_matches?, _} -> cover {:error, :old_target_not_matched, state}\n {:error, :enoent} -> cover {:ok, state}\n {:error, _posix} -> cover {:error, :cant_delete_file, state}\n end\n end\n\n defp delete_ref_imp(git_dir, name) do\n ref_path = Path.join(git_dir, name)\n File.rm(ref_path)\n end\n\n @impl true\n def handle_get_ref(%{git_dir: git_dir} = state, name, opts) do\n case get_ref_imp(git_dir, name, Keyword.get(opts, :follow_link?, true)) do\n {:ok, %Ref{name: ^name} = ref} ->\n cover {:ok, ref, state}\n\n {:ok, %Ref{name: link_target} = ref} ->\n cover {:ok, %{ref | link_target: link_target, name: name}, state}\n\n {:error, reason} ->\n cover {:error, reason, state}\n end\n end\n\n defp get_ref_imp(git_dir, name, true = _follow_link?) do\n case get_ref_imp(git_dir, name, false) do\n {:ok, %Ref{target: \"ref: \" <> link_target}} when link_target != name ->\n get_ref_imp(git_dir, link_target, true)\n\n x ->\n x\n end\n end\n\n defp get_ref_imp(git_dir, name, _follow_link?) 
do\n ref_path = Path.join(git_dir, name)\n\n case File.open(ref_path, [:read], &read_ref_imp(name, &1)) do\n {:ok, %Ref{} = ref} -> cover {:ok, ref}\n {:ok, reason} when is_atom(reason) -> cover {:error, reason}\n {:error, :enoent} -> cover {:error, :not_found}\n {:error, reason} -> cover {:error, reason}\n end\n end\n\n defp read_ref_imp(name, iodevice) do\n with target when is_binary(target) <-\n IO.read(iodevice, 1024),\n ref <- %Ref{name: name, target: String.trim(target)},\n {:valid_ref?, true} <- {:valid_ref?, Ref.valid?(ref)} do\n ref\n else\n {:valid_ref?, false} -> cover :invalid_ref\n reason when is_atom(reason) -> cover reason\n end\n end\n\n ## --- Config ---\n\n @impl true\n def handle_get_config_entries(%{config_file: config_file} = state, opts) do\n entries = ConfigFile.get_entries(config_file, opts)\n cover {:ok, entries, state}\n end\n\n @impl true\n def handle_add_config_entry(\n %{config_file: config_file} = state,\n %ConfigEntry{section: section, subsection: subsection, name: name, value: value} = _entry,\n opts\n ) do\n opts =\n opts\n |> Keyword.put(:section, section)\n |> Keyword.put(:subsection, subsection)\n |> Keyword.put(:name, name)\n\n case ConfigFile.update(config_file, value, opts) do\n :ok -> cover {:ok, state}\n {:error, reason} -> cover {:error, reason, state}\n end\n end\n\n @impl true\n def handle_remove_config_entries(%{config_file: config_file} = state, opts) do\n :ok = ConfigFile.remove_entries(config_file, opts)\n cover {:ok, state}\n end\nend"},{"coverage":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,2,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"name":"lib/xgit/repository/test/config_test.ex","source":"defmodule Xgit.Repository.Test.ConfigTest do\n @moduledoc false\n\n # Not normally part of the public API, but available for implementors of\n # `Xgit.Repository.Storage` behaviour modules. Tests the callbacks related to\n # `Xgit.ConfigEntry` to ensure correct implementation of the core contracts.\n # Other tests may be necessary to ensure interop. 
(For example, the on-disk\n # repository test code adds more tests to ensure correct interop with\n # command-line git.)\n\n # Users of this module must provide a `setup` callback that provides a\n # `repo` member. This repository may be of any type, but should be \"empty.\"\n # An empty repo has the same data structures as an on-disk repo created\n # via `git init` in a previously-empty directory.\n\n # IMPORTANT: We assume that the repo is initialized with a minimal configuration\n # that corresponds to the following:\n\n # [core]\n # repositoryformatversion = 0\n # filemode = true\n # bare = false\n # logallrefupdates = true\n\n # The official definition for this is located in on_disk_repo_test_case.ex,\n # private function rewrite_config/1.\n\n import Xgit.Util.SharedTestCase\n\n define_shared_tests do\n alias Xgit.ConfigEntry\n alias Xgit.Repository.Storage\n\n describe \"get_config_entries/2\" do\n test \"default case returns expected initial case\", %{repo: repo} do\n assert [_ | _] = config_entries = Storage.get_config_entries(repo)\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"bare\", value: \"false\"},\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"true\"},\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"logallrefupdates\",\n value: \"true\"\n },\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"repositoryformatversion\",\n value: \"0\"\n }\n ] = Enum.sort(config_entries)\n end\n\n test \"can filter by section\", %{repo: repo} do\n assert [_ | _] = config_entries = Storage.get_config_entries(repo, section: \"core\")\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"bare\", value: \"false\"},\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"true\"},\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"logallrefupdates\",\n value: \"true\"\n },\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"repositoryformatversion\",\n value: \"0\"\n }\n ] = Enum.sort(config_entries)\n end\n\n test \"can filter by subsection\", %{repo: repo} do\n assert [] =\n _config_entries =\n Storage.get_config_entries(repo, section: \"core\", subsection: \"mumble\")\n end\n\n test \"can filter by section + name\", %{repo: repo} do\n assert [_ | _] =\n config_entries = Storage.get_config_entries(repo, section: \"core\", name: \"bare\")\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"bare\", value: \"false\"}\n ] = Enum.sort(config_entries)\n end\n end\n\n describe \"add_config_entry/3\" do\n test \"basic case with default options\", %{repo: repo} do\n assert :ok =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"filemode\",\n value: \"true\"\n }\n )\n\n config_entries = Storage.get_config_entries(repo)\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"bare\", value: \"false\"},\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"true\"},\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"logallrefupdates\",\n value: \"true\"\n },\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"repositoryformatversion\",\n value: \"0\"\n }\n ] = Enum.sort(config_entries)\n end\n\n test \"add?: true\", %{repo: repo} do\n # Yes, this example is nonsense.\n\n assert :ok =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"filemode\",\n value: 
\"false\"\n },\n add?: true\n )\n\n config_entries =\n Storage.get_config_entries(repo,\n section: \"core\",\n subsection: nil,\n name: \"filemode\"\n )\n\n # Spec is agnostic as to whether new items get inserted at end of overall list\n # or elsewhere; only that the values for this entry must be sorted in the order added.\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"true\"},\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"false\"}\n ] = config_entries\n end\n\n test \"replace_all?: true\", %{repo: repo} do\n # Build upon previous nonsense example. Have multiple values and then replace them all.\n\n assert :ok =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"filemode\",\n value: \"false\"\n },\n add?: true\n )\n\n # Not testing output; duplicates previous test.\n\n assert :ok =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"filemode\",\n value: \"42\"\n },\n replace_all?: true\n )\n\n config_entries =\n Storage.get_config_entries(repo,\n section: \"core\",\n subsection: nil,\n name: \"filemode\"\n )\n\n # Spec is agnostic as to whether new items get inserted at end of overall list\n # or elsewhere; only that the values for this entry must be sorted in the order added.\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"42\"}\n ] = config_entries\n end\n\n test \"error: replacing multivar\", %{repo: repo} do\n # Build upon previous nonsense example. Have multiple values and then\n # attempt to replace them all but without the replace_all?: true flag.\n\n assert :ok =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"filemode\",\n value: \"false\"\n },\n add?: true\n )\n\n # Not testing output; duplicates previous test.\n\n assert {:error, :replacing_multivar} =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"filemode\",\n value: \"42\"\n }\n )\n\n config_entries =\n Storage.get_config_entries(repo,\n section: \"core\",\n subsection: nil,\n name: \"filemode\"\n )\n\n # Spec is agnostic as to whether new items get inserted at end of overall list\n # or elsewhere; only that the values for this entry must be sorted in the order added.\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"true\"},\n %ConfigEntry{section: \"core\", subsection: nil, name: \"filemode\", value: \"false\"}\n ] = config_entries\n end\n\n test \"error: invalid entry\", %{repo: repo} do\n assert_raise ArgumentError,\n \"Xgit.Repository.Storage.add_config_entry/3: entry is invalid\",\n fn ->\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"no spaces allowed\",\n subsection: nil,\n name: \"filemode\",\n value: \"true\"\n }\n )\n end\n end\n end\n\n describe \"remove_config_entries/2\" do\n test \"basic case without options (remove everything)\", %{repo: repo} do\n assert :ok = Storage.remove_config_entries(repo)\n assert [] = Storage.get_config_entries(repo)\n end\n\n test \"basic case: remove by section\", %{repo: repo} do\n assert :ok =\n Storage.add_config_entry(\n repo,\n %ConfigEntry{\n section: \"other\",\n subsection: nil,\n name: \"filemode\",\n value: \"false\"\n }\n )\n\n assert :ok = Storage.remove_config_entries(repo, section: \"core\")\n\n config_entries = Storage.get_config_entries(repo)\n\n assert [\n %ConfigEntry{\n 
section: \"other\",\n subsection: nil,\n name: \"filemode\",\n value: \"false\"\n }\n ] = Enum.sort(config_entries)\n end\n\n test \"basic case: remove specific variable\", %{repo: repo} do\n assert :ok = Storage.remove_config_entries(repo, section: \"core\", name: \"filemode\")\n\n config_entries = Storage.get_config_entries(repo)\n\n assert [\n %ConfigEntry{section: \"core\", subsection: nil, name: \"bare\", value: \"false\"},\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"logallrefupdates\",\n value: \"true\"\n },\n %ConfigEntry{\n section: \"core\",\n subsection: nil,\n name: \"repositoryformatversion\",\n value: \"0\"\n }\n ] = Enum.sort(config_entries)\n end\n end\n end\nend"}]}<<<<<< EOF # path=./lib/xgit/util/force_coverage.ex defmodule Xgit.Util.ForceCoverage do @moduledoc false # This module is intended for internal testing purposes only. # We use it to wrap literal returns from functions in a way that # makes them visible to code coverage tools. # When building dev or production releases, we use a more efficient # form; when building for test (i.e. coverage), we use a more # complicated form that defeats compiler inlining. # Inspired by discussion at # https://elixirforum.com/t/functions-returning-a-literal-are-not-seen-by-code-coverage/16812. # coveralls-ignore-start if Application.get_env(:xgit, :use_force_coverage?) do defmacro cover(false = x) do quote do inspect(unquote(x)) unquote(x) end end defmacro cover(nil = x) do quote do inspect(unquote(x)) unquote(x) end end defmacro cover(value) do quote do # credo:disable-for-next-line Credo.Check.Warning.BoolOperationOnSameValues false or unquote(value) end end else defmacro cover(value) do quote do unquote(value) end end end # coveralls-ignore-stop end <<<<<< EOF